'use strict';

// Job route handlers. Exported as a factory so the caller can inject shared
// `locals` (UTM/LatLon conversion helpers used by the spray-path functions).
module.exports = function (locals) {
const async = require('async'),
    assert = require('assert'),
    crypto = require('crypto'),
    debug = require('debug')('agm:job'),
    ObjectId = require('mongodb').ObjectId,
    { Job, JobLog, App, AppFile, AppDetail, Customer, JobAssign, Vehicle, Pilot, RptVar, User } = require('../model'),
    Currencies = require('../helpers/currencies'),
    path = require('path'),
    fs = require('fs-extra'),
    moment = require('moment'),
    turf = require('@turf/turf'),
    uniqid = require('uniqid'),
    utils = require('../helpers/utils'),
    jobUtil = require('../helpers/job_util'),
    geoUtil = require('../helpers/geo_util'),
    polyUtil = require('../helpers/poly_util'),
    mongoUtil = require('../helpers/mongo'),
    { enhancedRunInTransaction, runWithSessionOrTransaction } = require('../helpers/mongo_enhanced'),
    polylabel = require('polylabel'),
    cloneDeep = require('clone-deep'),
    simplify = require('simplify-path'),
    webUtil = require('../helpers/web_util'),
    { JobStatus, JobInvoiceStatus } = require('../helpers/job_constants'),
    { Units, Errors, DEFAULT_LANG, CostingItemType, UserTypes, AssignStatus, PartnerTasks, SystemTypes, RateUnits } = require('../helpers/constants'),
    { AppParamError, AppError, AppAuthError, AppInputError } = require('../helpers/app_error'),
    { getFormattedAddress, getDocumentCountry } = require('../helpers/user_helper'),
    env = require('../helpers/env'),
    redisCache = require('../helpers/redis_cache'),
    partnerSyncService = require('../services/partner_sync_service'),
    taskQHelper = require('../helpers/job_queue').getInstance(),
    { paginateWithCursor, validateCursorParams } = require('../helpers/cursor_pagination'),
    { buildDynamicFilter } = require('../helpers/dynamic_filter'),
    Joi = require('joi');
Joi.objectId = require('joi-objectid')(Joi);

// Whitelist of filterable fields for the jobs-list dynamic filter
// (field name -> value type understood by buildDynamicFilter).
const JOB_FILTER_SCHEMA = {
    client: 'objectid',
    _id: 'objectid-text',
    orderNumber: 'text',
    name: 'text',
    startDate: 'date',
    endDate: 'date',
    createdAt: 'date-preset',
    status: 'numeric-enum',
};

// TTL for cached jobs-list responses; a falsy value disables caching entirely.
const JOBS_CACHE_TTL = env.JOBS_CACHE_TTL;

/**
 * Build a deterministic cache key for the jobs list endpoint.
 * Normalises query params by sorting keys so that identical filter sets always
 * produce the same key regardless of the order params were appended.
 * @param {string} userScope - 'admin' or the user's puid string
 * @param {object} query - req.query
 * @returns {string}
 */
function buildJobsListCacheKey(userScope, query) {
    const normalized = JSON.stringify(
        Object.fromEntries(Object.keys(query).sort().map(k => [k, query[k]]))
    );
    // 16 hex chars of SHA-256 keeps keys compact while avoiding collisions here.
    const hash = crypto.createHash('sha256').update(normalized).digest('hex').slice(0, 16);
    return `jobs:list:${userScope}:${hash}`;
}

/**
 * Invalidate all cached jobs-list entries for a given puid scope and for the
 * admin scope (since admin queries span all accounts).
 * Fire-and-forget — errors are silently swallowed so they never block a response.
 * @param {string|ObjectId} puid
 */
function invalidateJobsListCache(puid) {
    if (!JOBS_CACHE_TTL) return;
    redisCache.delByPattern(`jobs:list:${puid}:*`).catch(() => {});
    redisCache.delByPattern('jobs:list:admin:*').catch(() => {});
}

/**
 * Handles the GET request to retrieve a list of jobs based on the provided filters.
 *
 * @async
 * @function getJobs_get
 * @param {Object} req.query - The query parameters from the request.
 * @param {string} [req.query.clientId] - The ID of the client to filter jobs by.
 * @param {boolean} [req.query.jpo] - Whether to filter jobs by the pilot's ID.
 * @param {string} [req.query.byTime] - A time range to filter jobs by creation date. E.g. '365d' for 365 days or '3m' for 3 months or '2021' for the year 2021
 * or an array of ISO 8601 date strings for a date range or a ISO 8601 date. E.g.: ?byTime[]=2022-01-01&byTime[]=2025-04-02 or ?byTime=2022-01-01
 * @param {number} [req.query.status] - The status of the jobs to filter by.
 * @throws {AppAuthError} If the user information is not provided in the request.
 * @throws {AppError} If the pilot does not exist when filtering by pilot.
* @returns {Promise} Sends a JSON response containing the list of jobs.
*
* @description
* This function retrieves a list of jobs from the database based on the provided query parameters.
* It supports filtering by client ID, pilot ID, time range, and job status. The function constructs
* a MongoDB aggregation pipeline to fetch and process the job data, including client and job log
* information. The resulting jobs are returned as a JSON response.
*/
async function getJobs_get(req, res) {
    const userInfo = req.userInfo;
    if (!userInfo) AppAuthError.throw();
    // Determine scope for cache key once so it can be reused for both read and write.
    const userScope = req.ut === UserTypes.ADMIN ? 'admin' : String(userInfo.puid);
    // Serve from cache when enabled; key derives from the normalized query params.
    if (JOBS_CACHE_TTL) {
        const cached = await redisCache.get(buildJobsListCacheKey(userScope, req.query));
        if (cached) return res.json(cached);
    }
    const filtersJson = req.query['filters'];
    // Soft-deleted jobs are always excluded.
    let filter = { markedDelete: { $in: [null, false] } };
    let dynFilter = {};
    if (filtersJson) {
        // Filter-submit path: all conditions come through the filters param.
        // Non-admin users are always scoped to their own master account (byPuid)
        // to prevent cross-account data leakage.
        if (req.ut !== UserTypes.ADMIN) {
            filter['byPuid'] = ObjectId(userInfo.puid);
        }
        dynFilter = buildDynamicFilter(filtersJson, JOB_FILTER_SCHEMA);
        !env.PRODUCTION && debug('dynFilter: %j', dynFilter);
    } else {
        // Legacy reload path: use individual query params.
        // A valid clientId narrows to that client; otherwise scope to the account.
        const clientId = req.query['clientId'];
        Object.assign(filter, utils.isObjectId(clientId) ? { client: ObjectId(clientId) } : { byPuid: ObjectId(userInfo.puid) });
        if (req.query['byTime']) {
            Object.assign(filter, mongoUtil.getDateFilter(req.query['byTime'], 'createdAt'));
        }
        // Only apply status when it is a known JobStatus value (NaN/0 are ignored).
        const status = Number(req.query['status']);
        if (status && Object.values(JobStatus).includes(status)) {
            filter['status'] = status;
        }
    }
    // jpo (jobs by pilot) applies to both paths
    const jobsByPilot = utils.stringToBoolean(req.query['jpo']);
    if (jobsByPilot) {
        const pilot = await Pilot.findById(ObjectId(req.uid), '_id', { lean: true });
        if (!pilot) AppError.throw(Errors.PILOT_NOT_EXIST);
        filter['operator'] = pilot._id;
    }
    const pipeline = [
        { $match: filter },
        // Dynamic filter runs as a second $match so it composes with the base filter.
        ...(Object.keys(dynFilter).length > 0 ? [{ $match: dynFilter }] : []),
        { $project: { _id: 1, orderNumber: 1, name: 1, createdAt: 1, startDate: 1, endDate: 1, status: 1, client: 1, costings: 1, invoiceStatus: 1, invoiceId: 1 } },
        // Resolve the client reference to its name/kind.
        { $lookup: { from: 'users', let: { client: "$client" }, pipeline: [ { $match: { $expr: { $and: [{ $eq: ["$_id", "$$client"] }] } } }, { $project: { name: 1, kind: 1 } } ], as: 'client' } },
        { $unwind: "$client" },
        // Per user: latest type-2 job-log entry, joined with the user's display name
        // (username, falling back to tailNumber, then name).
        { $lookup: { from: 'job_logs', let: { job_id: "$_id" }, pipeline: [ { $match: { $expr: { $and: [{ $eq: ["$job", "$$job_id"] }, { "$type": 2 }] } } }, { $project: { date: 1, user: 1 } }, { $group: { _id: "$user", date: { $max: "$date" } } }, { $lookup: { from: 'users', let: { user_id: "$_id" }, pipeline: [ { $match: { $expr: { $and: [{ $eq: ["$_id", "$$user_id"] }] } } }, { $project: { username: 1, tailNumber: 1, name: 1, _id: 0 } } ], as: 'userD' } }, { $unwind: "$userD" }, { $project: { _id: 0, date: 1, "user": { $ifNull: ["$userD.username", { $ifNull: ["$userD.tailNumber", "$userD.name"] }] } } }, { $sort: { date: -1, user: 1 } } ], as: 'by' } },
        // Final shape: default invoiceStatus, and only expose `by` on DOWNLOADED jobs.
        { $project: { _id: 1, orderNumber: 1, name: 1, createdAt: 1, startDate: 1, endDate: 1, status: 1, client: 1, costings: 1, invoiceId: 1, invoiceStatus: { $ifNull: ['$invoiceStatus', JobInvoiceStatus.NONE] }, by: { $cond: { if: { $and: [{ $gt: [{ $size: "$by" }, 0] }, { $eq: ["$status", JobStatus.DOWNLOADED] }] }, then: "$by", else: "$$REMOVE" } }, } }
    ];
    const jobs = await Job.aggregate(pipeline);
    // Best-effort cache write; failures must never affect the response.
    if (JOBS_CACHE_TTL) {
        redisCache.set(buildJobsListCacheKey(userScope, req.query), jobs, JOBS_CACHE_TTL).catch(() => {});
    }
    res.json(jobs);
}

/**
 * Create a new job (POST). Supports cloning: when `cloneId` is set, geometry
 * (spray/excluded areas, buffers, waypoints, places) is copied from the original job.
 * @throws {AppParamError} when the request body is missing.
 * @throws {AppAuthError} when no user info is attached to the request.
 */
async function createJob_post(req, res) {
    const _job = req.body;
    if (!_job) AppParamError.throw();
    // Validate/normalize costing items before constructing the document.
    if (!utils.isEmptyArray(_job?.costings?.items)) {
        _job.costings = handleCostingItems(_job.costings);
    }
    if (_job._id === 0) {
        _job.cloneId = +_job.cloneId;
        delete _job.rptOp;
    }
    delete _job._id;
    const job = new Job(req.body); // To assign properties quickly
    // If it is a cloned Job, find the original job then copy items to the new job.
    if (!!(_job.cloneId)) {
        const orgJob = await Job.findById(_job.cloneId, 'sprayAreas excludedAreas bufs waypoints places byPuid').lean();
        if (orgJob) {
            for (let p in orgJob) { if (p != "_id") job[p] = orgJob[p]; }
            // Make sure appRate consistent between job and its spray areas
            if (!utils.isEmptyArray(job.sprayAreas)) {
                for (let i = 0; i < job.sprayAreas.length; i++) {
                    if (job.sprayAreas[i].properties.appRate != job.appRate) {
                        job.sprayAreas[i].properties.appRate = job.appRate;
                    }
                }
            }
        }
        // Clear timestamps copied from the original so the clone gets fresh ones.
        job.set('createdAt', undefined, { strict: false });
        job.set('updatedAt', undefined, { strict: false });
    }
    const userInfo = req.userInfo;
    if (!userInfo) AppAuthError.throw();
    job.byPuid = userInfo.puid;
    const savedJob = await job.save();
    // Echo back the referenced entities exactly as the client sent them
    // (no re-population round trip needed).
    const insertedJob = savedJob.toObject();
    insertedJob.client = _job.client;
    insertedJob.product = _job.product;
    insertedJob.operator = _job.operator;
    insertedJob.vehicle = _job.vehicle;
    res.json(insertedJob);
    // Fire-and-forget cache invalidation, after the response is sent.
    invalidateJobsListCache(req.userInfo?.puid);
}

/**
 * Fetch a single job (GET). `withItems`/`withLines` control how much geometry
 * is returned; without items, the heavy geometry fields are excluded at query time.
 */
async function getJob_get(req, res) {
    const jobId = req.params.job_id;
    const withItems = req.query['withItems'] == "true" ? true : false;
    const withLines = req.query['withLines'] == "true" ? true : false;
    let exludeFields = withItems ?
'' : '-excludedAreas -bufs -waypoints -places -heading -masterPoint'; if (!withLines) exludeFields += '-lines'; if (!Number(jobId) || !utils.isNumber(Number(jobId))) AppParamError.throw(); const job = await Job.findById(jobId) .select(exludeFields) .populate({ path: 'client', select: 'name' }) .populate({ path: 'operator', select: 'name' }) .populate({ path: 'vehicle', select: { 'name': 1 } }) .populate({ path: 'crop', select: 'name', skipInvalidIds: true }) .populate(withItems ? '' : 'products.product') .populate(withItems ? 'sprayAreas.properties.crop' : '', 'name'); if (!job) AppError.throw(Errors.JOB_NOT_FOUND); const _job = job.toObject(); _job.hasItems = !(utils.isEmptyArray(_job.sprayAreas)); if (!withItems) delete _job.sprayAreas; let jobwSumAreas; // Get sum of all spray areas if the area was not set for the job if (!withItems && (!_job.loadOp || !_job.loadOp.area)) { jobwSumAreas = await Job.aggregate( [ { $match: { _id: _job._id } }, { $unwind: { path: "$sprayAreas" } }, { $group: { _id: null, totalArea: { $sum: "$sprayAreas.properties.area" }, } } ] ); } if (!withItems) { if (jobwSumAreas && jobwSumAreas.length && jobwSumAreas[0].totalArea) { _job.loadOp = jobUtil.defLoadOp(_job.loadOp); _job.loadOp.area = Number(utils.toArea(jobwSumAreas[0].totalArea, _job.measureUnit, false).toFixed(1)); // sqm2 to ha or acre } } res.json(_job); } function handleCostingItems(inputCostings) { const costingItemsSchema = Joi.object().keys({ billableArea: Joi.number().min(0).optional().default(0), billableAmount: Joi.number().min(0).required().default(0), currency: Joi.string().valid(...Object.keys(Currencies)).optional(), items: Joi.array().items({ item: Joi.objectId().required(), name: Joi.string().required(), price: Joi.number().min(0).required(), quantity: Joi.number().min(0).required(), type: Joi.number().valid(...Object.values(CostingItemType)), unit: Joi.number().valid(...Object.values(Units)), }).required() }); const { error, value } = costingItemsSchema.options({ 
stripUnknown: true }).validate(inputCostings);
    if (error) AppInputError.throw(error.details[0].message)
    const { items, billableArea, billableAmount, currency } = value;
    return { currency, billableArea, billableAmount: utils.toFixedNumber(billableAmount), items };
}

/**
 * Update a job (PUT).
 * Param Object: { job: Job, updateItems: boolean, updateStatus?: boolean, delSprItems: any[], useDefRate }
 * Two modes: metadata update (updateItems=false), which never touches geometry,
 * and items update (updateItems=true), which replaces the geometry collections.
 */
async function updateJob_put(req, res) {
    if (!req.params.job_id || !req.body.job) AppParamError.throw();
    delete req.body.job._id;
    let upJob = {};
    const updateItems = req.body.updateItems || false;
    const useDefRate = req.body.useDefRate || false;
    const inJob = req.body.job;
    if (!updateItems) {
        upJob = inJob;
        // Geometry fields are only managed by the items-update mode below.
        delete upJob.sprayAreas;
        delete upJob.excludedAreas;
        delete upJob.waypoints;
        delete upJob.bufs;
        delete upJob.places;
        delete upJob.rptOp; // Not overwrite report options, only update after Preview report
        delete upJob.weatherInfo; // Not overwrite weather info, only update after Preview report
        delete upJob.invoiceStatus;
        delete upJob.invoiceId;
        // Accept either populated objects or bare ids for the reference fields.
        if (upJob.client && upJob.client._id) upJob.client = upJob.client._id;
        if (upJob.product && upJob.product._id) upJob.product = upJob.product._id;
        if (upJob.operator && upJob.operator._id) upJob.operator = upJob.operator._id;
        if (upJob.vehicle && upJob.vehicle._id) upJob.vehicle = upJob.vehicle._id;
    } else {
        upJob.ttSprArea = inJob.ttSprArea;
    }
    if (updateItems) {
        if (req.body.updateStatus) upJob.status = inJob.status;
        // Sanitize all geometry collections in parallel.
        const [areas, xcls, waypoints, places] = await Promise.all([
            jobUtil.cleanAreasAsync(inJob.sprayAreas),
            jobUtil.cleanAreasAsync(inJob.excludedAreas),
            jobUtil.cleanGeoPointsAsync(inJob.waypoints),
            jobUtil.cleanGeoPointsAsync(inJob.places)
        ]);
        upJob.sprayAreas = areas;
        upJob.excludedAreas = xcls;
        upJob.waypoints = waypoints;
        upJob.places = places;
        upJob.bufs = inJob.bufs;
    } else if (useDefRate && inJob['appRate']) {
        // Propagate the job-level application rate to every spray area ($[] = all elements).
        await Job.updateMany(
            { _id: req.params.job_id, "sprayAreas.properties": { $exists: true } },
            { $set: {
"sprayAreas.$[].properties.appRate": inJob.appRate } }); } if (!utils.isEmptyArray(inJob?.costings?.items)) { upJob.costings = handleCostingItems(inJob.costings); } const job = await Job.findOneAndUpdate({ _id: req.params.job_id }, upJob, { new: true }) // NOTES: disable validation for now .populate({ path: 'client', select: 'name' }) .populate({ path: 'operator', select: 'name' }) .populate({ path: 'vehicle', select: 'name' }) .populate('crop', 'name') .populate(updateItems ? '' : 'products.product') .populate(updateItems ? 'sprayAreas.properties.crop' : '', 'name'); if (!job) AppError.throw(Errors.JOB_NOT_FOUND); const retJob = job.toObject(); if (!updateItems) { retJob.hasItems = (retJob.sprayAreas.length > 0); delete retJob.sprayAreas; delete retJob.excludedAreas; delete retJob.bufs; delete retJob.waypoints; delete retJob.places; } else { // Delete gridlines of deleted spray areas const delSprItems = req.body.delSprItems; if (!utils.isEmptyArray(delSprItems)) await jobUtil.deleteAreaLines(delSprItems.map(a => ObjectId(a))); } res.json(retJob); invalidateJobsListCache(req.userInfo?.puid); } async function deleteJob(req, res) { const job = await Job.findById(req.params.job_id); const puid = req.userInfo?.puid || job?.byPuid; if (job) await job.removeFull(); res.json({ ok: true }).end(); invalidateJobsListCache(puid); } /** * Apply drift, checking and skipping or splitting into spray segments * RULES: If the deposit location is NOT inside any XCLs, and if drift position is on an XCL, don’t plot or paint spray on */ function setDriftSegs(seg, xclZones, refUTM) { if (utils.isEmptyArray(seg)) return seg; const llUTM = new locals.LatLonUTM(0, 0), utmBack = locals.UTM.newInstance(refUTM.zone, refUTM.hemisphere, 0, 0); let cur = 0, orgUtmPnt, driftedLL, depLL, segs = [], skip; let _seg = cloneDeep(seg); // Clone the seg for adding drifts while (cur <= (_seg.length - 1)) { skip = false; llUTM.lat = _seg[cur].lat, llUTM.lon = _seg[cur].lon; orgUtmPnt = 
llUTM.toUtm(refUTM.zone, refUTM.hemisphere); // Current ll to UTM if (utils.isNumber(_seg[cur].driftX) && utils.isNumber(_seg[cur].driftY) && (_seg[cur].driftX !== 0.0 || _seg[cur].driftY !== 0.0)) { utmBack.easting = orgUtmPnt.easting + _seg[cur].driftX; utmBack.northing = orgUtmPnt.northing + _seg[cur].driftY; driftedLL = utmBack.toLatLon(); _seg[cur].lat = driftedLL.lat, _seg[cur].lon = driftedLL.lon; } if (!utils.isEmptyArray(xclZones) && utils.isNumber(_seg[cur].depositX) && utils.isNumber(_seg[cur].depositX)) { if (polyUtil.isPointinPolys(_seg[cur].lat, _seg[cur].lon, xclZones)) { utmBack.easting = orgUtmPnt.easting + _seg[cur].depositX; utmBack.northing = orgUtmPnt.northing + _seg[cur].depositY; depLL = utmBack.toLatLon(); // Scan until get excluded point => add the previous point to end a segment if (!polyUtil.isPointinPolys(depLL.lat, depLL.lng, xclZones)) { // Skip the point or split seg here if (segs.length && segs[segs.length - 1].length && cur != segs.length - 1) segs.push([]); skip = true; } } } if (!skip) { if (!segs.length) segs[0] = [_seg[cur]]; else segs[segs.length - 1].push(_seg[cur]); } cur++; } return segs; } function applyDrifts(segs, xclZones, refUTM) { let _segs = [], appliedSegs; for (let i = 0; i < segs.length; i++) { appliedSegs = setDriftSegs(segs[i], xclZones, refUTM); appliedSegs && (_segs = [..._segs, ...appliedSegs]); } return _segs; } /** * Get Job Data by Id * @param {*} jobId * @param {*} selectFields * @param {*} Ops Query Options with * { * wApps: with application info or not true/false, * dataOp 0: spray inside paths only, 1: spray paths, 2: flight paths only, 3: both spray-in and flight paths, 4: spray and flight paths, * wFileId: true/false whether to include fileId or not * } * @returns wApps true ? 
{ jobId: job._id, measureUnit: job.measureUnit, apps: [], fileIds: [], data: [appFileData] } : [appFileData]
 * appFileData { id:(appfile Id), file: filename, data: (arrays of sprayed segments) [[[lat, lon]]] }
 **/
async function getAppDataByJobId(jobId, selectFields, { wApps = false, dataOp = 0, wFileId = false, withJob = false }) {
    const job = await Job.findById(jobId).lean();
    if (!job) AppError.throw(Errors.JOB_NOT_FOUND);
    // Result shape depends on the flags: a rich object (optionally embedding the
    // job document itself) or a bare array of per-file data.
    const retData = wApps ? withJob ? ({ job: job, apps: [], fileIds: [], data: [] }) : ({ jobId: job._id, measureUnit: job.measureUnit, apps: [], fileIds: [], data: [] }) : [];
    const projection = { _id: 1, fileName: 1, totalSprayed: 1, startDateTime: 1, endDateTime: 1, totalFlightTime: 1, totalSprayTime: 1, totalTurnTime: 1 };
    // Only applications with status 3 and a non-null spray total are considered.
    const _apps = await App.find({ 'jobId': jobId, 'status': 3, 'totalSprayed': { $ne: null }, markedDelete: { $ne: true } }, projection).sort({ 'startDateTime': 1 }).lean();
    if (utils.isEmptyArray(_apps)) return retData;
    retData.apps = _apps;
    let afSels = '_id name totalSprayed';
    // Archived jobs keep pre-computed segment data on the AppFile document itself.
    if (job.status === JobStatus.ARCHIVED) afSels += ' data';
    const appFiles = await AppFile.find({ appId: { $in: _apps.map(it => it._id) } }, afSels).sort("agn").lean();
    if (job.status === JobStatus.ARCHIVED) {
        // Short-circuit: return stored data with no recomputation.
        appFiles.map(af => {
            const afData = { file: af.name, data: af.data };
            if (wApps) { retData.fileIds.push(af._id); retData.data.push(afData) } else retData.push(afData);
        });
        return retData;
    }
    if (!utils.isEmptyArray(appFiles)) {
        let refUTM;
        if (dataOp != 2) {
            // Calculate reference UTM Zone on job's areas
            let allAreas = [...job.sprayAreas || [], ...job.excludedAreas || []];
            for (let zone of allAreas) zone.type = 'Feature';
            if (!utils.isEmptyArray(allAreas)) {
                const centerP = turf.center({ type: "FeatureCollection", features: allAreas });
                if (centerP) {
                    const point = turf.getCoord(centerP);
                    refUTM = new locals.LatLonUTM(point[1], point[0]).toUtm();
                }
            }
        }
        let afDetails;
        // Process application files one at a time (sequential awaits).
        for await (const appFile of appFiles) {
            if (wApps)
retData.fileIds.push(appFile._id);
            // .sort('gpsTime') // No need, to avoid pass midnight reset gpsTime data issue
            afDetails = await AppDetail.find({ 'fileId': appFile._id }).select(selectFields + ' driftX driftY depositX depositY').lean();
            if (!utils.isEmptyArray(afDetails)) {
                // Handle the worst case if job has no areas.
                if (!refUTM) refUTM = new locals.LatLonUTM(afDetails[0].lat, afDetails[0].lon).toUtm();
                let segs = [], fsegs = [], sprayOp = dataOp < 2 ? dataOp : (dataOp - 2) - 1, fileData;
                if (dataOp != 2) {
                    // Non-.asc files with spray-inside mode requested are trimmed to
                    // in-area points; otherwise use all spray-on points.
                    segs = !(/^.*.asc$/i.test(appFile.name)) && sprayOp < 1 ? getSprayOnSegments(afDetails, true) : getSprayOnSegments(afDetails, false);
                    segs = applyDrifts(segs, job.excludedAreas, refUTM);
                    // Simplify longer segments to shrink the payload.
                    for (let k = 0; k < segs.length; k++) {
                        if (segs[k].length > 2) {
                            const lonlats = segs[k].map(it => [it.lon, it.lat]);
                            const simplified = simplify(lonlats, 0.00001);
                            segs[k] = simplified.map(it => [it[1], it[0]]);
                        } else segs[k] = segs[k].map(it => [it.lat, it.lon]);
                    }
                }
                fileData = { file: appFile.name, data: segs };
                if (wFileId) fileData.id = appFile._id;
                if (dataOp >= 2) {
                    // Flight paths use a coarser simplification tolerance than spray paths.
                    fsegs = getFlightSegments(afDetails);
                    for (let k = 0; k < fsegs.length; k++) {
                        if (!fsegs[k].length) continue;
                        if (fsegs[k].length > 2) {
                            const lonlats = fsegs[k].map(it => [it.lon, it.lat]);
                            const simplified = simplify(lonlats, 0.000019);
                            fsegs[k] = simplified.map(it => [it[1], it[0]]);
                        } else fsegs[k] = fsegs[k].map(it => [it.lat, it.lon]);
                    }
                    fileData['fdata'] = fsegs;
                }
                if (wApps) retData.data.push(fileData); else retData.push(fileData);
            }
        }
    }
    return retData;
}

/**
 * Split raw track points into flight segments, closing a segment whenever two
 * consecutive fixes are >= 100 m apart (gap in the recording) or the data ends.
 * @param {Array} data - track points with lat/lon
 * @returns {Array[]} segments of at least two points
 */
function getFlightSegments(data) {
    if (!data || utils.isEmptyArray(data)) return [];
    let start = 0, cur = 1, seg = [], segs = [];
    const MAX_DIST_MET = 100;
    while (cur < data.length) {
        if ((geoUtil.distance([data[cur - 1].lat, data[cur - 1].lon], [data[cur].lat, data[cur].lon]) >= MAX_DIST_MET) || cur === data.length - 1) {
            seg = data.slice(start, (cur + 1));
            if (seg.length > 1) { segs.push(seg); }
            seg = [];
            start = cur;
        }
        cur++;
    }
    return segs;
}

/**
 * Get Spray-On segments
 * @param {*} data - track points (sprayStat, llnum, satsIn, lat/lon)
 * @param {*} sprayInOnly - when true, trim leading/trailing points with satsIn < 99
 */
function getSprayOnSegments(data, sprayInOnly) {
    if (!data || utils.isEmptyArray(data)) return [];
    let llnum = data[0].llnum;
    let start = 0, cur = 1, seg = [], segs = [];
    while (cur < data.length) {
        // Skip runs where spray is off (advance both window pointers together).
        if (data[cur - 1].sprayStat == 0 && data[cur].sprayStat == 0 || data[cur - 1].sprayStat == 0 && data[cur].sprayStat != 0) {
            llnum = data[cur].llnum;
            start++;
            cur++;
            continue;
        }
        // Close the current segment on: line-number change, status 3, an ending
        // status transition, a >= 1 m jump, a big sprayStat jump (sprayInOnly),
        // or end of data.
        if (llnum !== data[cur].llnum || data[cur].sprayStat === 3 || endSegChecker(data[cur].sprayStat, data[cur - 1].sprayStat) || (geoUtil.distance([data[cur - 1].lat, data[cur - 1].lon], [data[cur].lat, data[cur].lon]) >= 1) || (sprayInOnly && (data[cur].sprayStat - data[cur - 1].sprayStat) > 99) || cur === data.length - 1) {
            seg = data.slice(start, cur);
            if (seg.length > 1) {
                if (sprayInOnly) {
                    // Trim outside points at beginning or end
                    while (seg.length && (seg[0].satsIn < 99 || seg[0].sprayStat == 0)) seg.shift();
                    let g = seg.length - 1;
                    while (g > 0) { if (seg[g].satsIn < 99) { seg.splice(g, 1); g--; } else break; }
                }
                if (seg.length > 1) segs.push(seg);
            }
            seg = [];
            start = cur;
        }
        llnum = data[cur].llnum;
        cur++;
    }
    return segs;
}

/**
 * True when the (prev -> cur) sprayStat transition ends a spray segment.
 * NOTE(review): several clauses are subsumed by `cur <= 0 && prev > 0`;
 * kept as written to preserve exact behavior.
 */
function endSegChecker(cur, prev) {
    return (cur === 0 && prev === 1) || (cur === 0 && prev === 10) || (cur === 3 && prev === 0) || (cur === 0 && prev === 3) || (cur <= 0 && prev > 0 || (cur === 3 && prev === 1));
}

/**
 * POST: return spray/flight path data for a job. When `inside` is present in
 * the body, the response also carries aggregated weather info for the files.
 */
async function getData_post(req, res) {
    const jobId = req.body.jobId;
    if (!jobId) AppParamError.throw();
    const wApps = (req.body.inside != undefined);
    let dataOp = req.body.dataOp || 0;
    if (wApps) dataOp = req.body.inside == 0 ? 1 : 0;
    // '-_id lat lon head alt grSpeed sprayStat lminApp timeAdv swath gpsTime'
    const jobAppData = await getAppDataByJobId(jobId, '-_id lat lon sprayStat llnum gpsTime satsIn stdHdop', { wApps, dataOp });
    let result = { jobId: jobId, data: jobAppData ? (wApps ?
jobAppData['data'] : jobAppData) : [] };
    // Get the aggregated weather info
    if (wApps && jobAppData && !utils.isEmptyArray(jobAppData.fileIds)) {
        const wi = await jobUtil.getDataWeatherInfo(jobAppData.fileIds);
        if (wi && wi.length) result['weatherInfo'] = ({
            windSpd: utils.mpSecToKnot(wi[0]['avgWindSpd']).toFixed(1),
            windDir: utils.deg2Compass(wi[0]['avgWindDir']),
            temp: utils.inCorF(wi[0]['avgTemp'], jobAppData.measureUnit, false),
            humid: utils.truncR(wi[0]['avgHumid'], 0)
        });
    }
    res.json(result ? result : []);
}

/**
 * Get Report Setting values for the job.
 * @returns If both coverage and actualVol values exist, return them to the client's request as is otherwise gather from imported files.
 * Notes: the values's units are based on the job's measurement system
 */
async function getReportOps_get(req, res) {
    // NOTE(review): jobId is read from the body even though this is a GET handler — confirm routing.
    const jobId = req.body.jobId;
    if (!jobId) return res.json(null).end();
    let cvrVal = 0, actVol = 0;
    const _job = await Job.findById(jobId, { rptOp: 1, measureUnit: 1, appRate: 1, appRateUnit: 1, swathWidth: 1 }).lean();
    if (!_job) return res.json(null).end(); // Ignore error for now even if the job is being or was just deleted
    const numOfApps = await App.countDocuments({ jobId: _job._id });
    if (numOfApps) {
        // Prefer values already stored on the job's report options.
        if (_job.rptOp) {
            if (_job.rptOp.coverage) cvrVal = utils.toArea(_job.rptOp.coverage, _job.measureUnit, true);
            if (_job.rptOp.actualVol) actVol = utils.toVolume(_job.rptOp.actualVol, (_job.appRateUnit !== RateUnits.LBS_PER_ACRE && _job.appRateUnit !== RateUnits.KG_PER_HA), _job.measureUnit);
        }
        // Fall back to aggregating the imported application files.
        if (!cvrVal || !actVol) {
            const results = await App.aggregate([
                { $match: { jobId: jobId, markedDelete: { $ne: true } } },
                { $group: { _id: null, coverage: { $sum: "$totalSprayed" }, totalLength: { $sum: "$totalSprLength" }, actualVol: { $sum: "$totalSprayMat" } } }]);
            // Total Coverage = Total Coverage from AgNav data + Total Coverage by Length (Non-AgNav data)
            if (results && results.length > 0) {
                if (!cvrVal) {
                    if (results[0].coverage) cvrVal =
utils.toArea(results[0].coverage, _job.measureUnit, true);
                    // Length-based coverage: track length x swath width.
                    // NOTE(review): the 1e-4 factor presumably scales m^2 to ha before
                    // utils.toArea — verify against utils.toArea's expected input unit.
                    if (results[0].totalLength && _job.swathWidth) cvrVal += utils.toArea((results[0].totalLength * utils.toMeter(_job.swathWidth, _job.measureUnit)) * 1e-4, _job.measureUnit, true);
                }
                if (!actVol) {
                    if (results[0].actualVol) actVol = utils.toVolume(results[0].actualVol, (_job.appRateUnit !== RateUnits.LBS_PER_ACRE && _job.appRateUnit !== RateUnits.KG_PER_HA), _job.measureUnit);
                }
            }
        }
    }
    // Defaults when unset on the job: printArea true, useActualVol false.
    const rptOp = {
        areaSize: _job.rptOp && _job.rptOp.areaSize ? utils.toArea(_job.rptOp.areaSize, _job.measureUnit, true) : 0,
        printArea: (_job.rptOp && _job.rptOp['printArea'] !== undefined) ? _job.rptOp['printArea'] : true,
        coverage: cvrVal,
        appRate: _job.appRate,
        useActualVol: (_job.rptOp && _job.rptOp['useActualVol'] !== undefined) ? _job.rptOp['useActualVol'] : false,
        actualVol: actVol
    };
    res.json(rptOp);
}

/**
 * POST: prepare the assets for the application report preview (map snapshot,
 * data files) and persist the submitted report settings on the job.
 */
async function preAppReport_post(req, res) {
    const input = req.body;
    const jobId = input.jobId;
    let job, sprayData, hasData = false, lang = input.lang || DEFAULT_LANG, rptDS;
    const theJob = await Job.findById(jobId)
        .populate({
            path: 'client', select: '-password',
            populate: {
                path: 'Country', model: 'Country', select: 'code name -_id',
                foreignField: 'code', // Join on the 'code' field in Country model
                localField: 'country' // Match with the 'country' field in client
            }
        })
        .populate({
            path: 'operator', select: '-password',
            populate: {
                path: 'Country', model: 'Country', select: 'code name -_id',
                foreignField: 'code', // Join on the 'code' field in Country model
                localField: 'country' // Match with the 'country' field in operator
            }
        })
        .populate({ path: 'vehicle', select: '-password' })
        .populate('products.product', 'name type restricted epaReg')
        .populate('crop', 'name');
    if (!theJob) AppError.throw(Errors.JOB_NOT_FOUND);
    job = theJob.toObject();
    // Save report settings to the corresponding job.
let updateVars = {}; if (input.rptOp) { const rptOp = Object.assign({}, input.rptOp); // Check and convert report settings's values to Metric if the job is in US Measurement if (job.measureUnit) { rptOp.coverage = utils.acreToHa(rptOp.coverage); rptOp.areaSize = utils.acreToHa(rptOp.areaSize); rptOp.actualVol = utils.toMetricVolume(rptOp.actualVol, (job.appRateUnit !== Units.LB && job.appRateUnit !== Units.KG), job.measureUnit); } updateVars["rptOp"] = rptOp; } updateVars["useCustWI"] = input.useCustWI; updateVars["weatherInfo"] = input.weatherInfo; if (Object.keys(updateVars).length) { // Update the custom weather info to the job. const updatedJob = await Job.findOneAndUpdate({ _id: jobId }, { $set: updateVars }, { new: true, lean: true }); if (!updatedJob) AppError.throw(Errors.JOB_NOT_FOUND); job.useCustWI = updatedJob.useCustWI; job.rptOp = updatedJob.rptOp; job.weatherInfo = updatedJob.weatherInfo; } const genFolder = uniqid(`app_${jobId}_`); const tempFolder = path.join(env.TEMP_DIR, 'report', genFolder); const targetFolder = path.join(env.REPORT_DIR, 'dat', genFolder); let reportWebTempPath = `${req.protocol}://${req.hostname}/report/${genFolder}/`; job.sprayAreas = await setPolysWCenter(job.sprayAreas); job.excludedAreas = await setPolysWCenter(job.excludedAreas); const customer = await Customer.findOne({ '_id': job.byPuid }, '-password', { lean: true }) .populate({ path: 'Country', select: 'code name -_id', model: 'Country' }) .lean(); let dataOp = 2; if (input.showSprayed) { if (input.showFlights) dataOp = input.sprOp.dataOp == 0 ? 4 : 3; else dataOp = input.sprOp.dataOp == 0 ? 
1 : 0; } if (input.showSprayed || input.showFlights) { sprayData = await getAppDataByJobId(jobId, '-_id lat lon sprayStat llnum gpsTime xTrack satsIn', { wApps: true, dataOp }); hasData = !!(sprayData && !utils.isEmptyArray(sprayData.data)); } // Copy template file to temp folder await fs.copy(path.join(process.cwd(), 'public/sprayMap.html'), path.join(tempFolder, 'sprayMap.html')); // Create data json file const data = { premium: (customer && customer.premium || 0), job: job, params: input.params, data: hasData ? sprayData.data : null, sprOp: input.sprOp, obs: input.obs || [], colors: input.colors || { sprayZone: 'blue', fpColor: 'lime' } }; await fs.writeFile(path.join(tempFolder, 'spraydata.js'), 'var req=' + JSON.stringify(data, null, 2) + ';', 'utf-8'); await fs.ensureDir(path.join(targetFolder, 'map')); // Capture the picture of the area with spray paths in a map webpage await webUtil.webShot({ url: reportWebTempPath + 'sprayMap.html', type: 'jpeg', quality: 90, width: input.params.width, height: input.params.height, path: path.join(targetFolder, 'map') + '.jpg' }); // Consolidate the Dataset and related data for the report so the client then can render the report in Report Viewer const numOfApps = sprayData && sprayData.apps ? sprayData.apps.length : 0; const startApp = numOfApps ? sprayData.apps[0] : null; let endApp; if (numOfApps > 0) { endApp = numOfApps === 1 ? startApp : sprayData.apps[sprayData.apps.length - 1]; } // TODO: Handle the case the actVol is preferred or input manually const totalSprayedArea = Number(input.rptOp.coverage); const appRate = input.rptOp && input.rptOp.appRate ? 
Number(input.rptOp.appRate) : job.appRate; let totalVolume = totalSprayedArea * appRate; let totalRateUnit = job.appRateUnit; if (job.measureUnit && job.appRateUnit === RateUnits.OZ_PER_ACRE) { totalVolume = utils.ozToGal(totalVolume); totalRateUnit = RateUnits.GAL_PER_ACRE; } let actVolAdj = 1; // Actual volume different adjustment factor if (input.rptOp.useActualVol && input.rptOp.actualVol > 0 && input.rptOp.actualVol !== totalVolume) { actVolAdj += (input.rptOp.actualVol - totalVolume) / totalVolume; totalVolume = totalVolume * actVolAdj; } moment.locale(lang); const planStartDate = moment(job.startDate); const planEndDate = moment(job.endDate); const actStartDate = moment.utc(startApp ? startApp.startDateTime : null); const actEndDate = moment.utc(endApp ? endApp.endDateTime : null); // Total area coverage used to calculate the Total Vol used of products const totalCoverage = utils.roundTo(!hasData ? input.totalArea : totalSprayedArea, 1); rptDS = makeJobAppDataSource(job, customer, `https://${req.hostname}/reports/dat/${genFolder}/map.jpg`, lang); let app = { appId: "app1", jobId: jobId, orderNum: job.orderNum || '', planStart: planStartDate.isValid() ? planStartDate.format("MMM DD, YYYY") : '', planEnd: planEndDate.isValid() ? planEndDate.format("MMM DD, YYYY") : '', appRate: '', actStart: '', actEnd: '', actStartTime: '', actEndTime: '', totalArea: '', totalSprayed: '', totalVolume: '', dataFile: '', windSpd: "", windDir: "", temp: "", humid: "", }; if (numOfApps) { rptDS.reports.type = 1; // Application Report app.appRate = utils.toLocaleStr(appRate, 2, lang) + ' ' + utils.rateUnitString(job.appRateUnit, true); app.actStart = actStartDate.isValid() ? actStartDate.format("MMM DD, YYYY") : ''; app.actEnd = actEndDate.isValid() ? actEndDate.format("MMM DD, YYYY") : ''; app.actStartTime = actStartDate.isValid() ? actStartDate.format('HH:mm:ss') : ''; app.actEndTime = actEndDate.isValid() ? 
actEndDate.format('HH:mm:ss') : ''; app.totalSprayed = totalSprayedArea ? utils.toLocaleStr(totalSprayedArea, 1, lang) + ' ' + utils.areaUnitString(job.measureUnit, true) : ''; // Total used volume, estimated = total applied area * appRate app.totalVolume = totalVolume ? utils.toLocaleStr(totalVolume, 1, lang) + ' ' + utils.rateUnitString(totalRateUnit, true, 1) : ''; app.dataFile = sprayData.apps.map(i => i.fileName).join(','); let totalFlightTime = 0, totalSprayTime = 0, totalTurnTime = 0; for (let i = 0; i < sprayData.apps.length; i++) { const app = sprayData.apps[i]; if (app.totalSprayTime) totalSprayTime += app.totalSprayTime; if (app.totalTurnTime) totalTurnTime += app.totalTurnTime; if (app.totalFlightTime) totalFlightTime += app.totalFlightTime; } app.totalFlightTime = utils.secondsToHMS(totalFlightTime, 2); app.totalSprayTime = utils.secondsToHMS(totalSprayTime, 2); app.totalTurnTime = utils.secondsToHMS(totalTurnTime, 2); } if (input.rptOp && input.rptOp.printArea && utils.isNumber(input.rptOp.areaSize)) { app.totalArea = `${utils.toLocaleStr(Number(input.rptOp.areaSize), 1, lang)} ${utils.areaUnitString(job.measureUnit, true)}`; } rptDS.apps.push(app); if (!utils.isEmptyArray(job.products)) { rptDS.products = []; let rate, unit, prod; for (let i = 0; i < job.products.length; i++) { rate = job.products[i].rate; unit = job.products[i].unit; prod = { id: job.products[i]._id, jobId: jobId, name: job.products[i].product.name, type: job.products[i].product.type, epaReg: job.products[i].product.epaReg || '', restricted: job.products[i].product.restricted || false, rateStr: utils.toLocaleStr(rate, 2, lang) + ' ' + utils.getProdUnit(unit) }; rate = (rate * totalCoverage) * actVolAdj; if (unit === Units.OZ) { unit = Units.GAL; rate = utils.ozToGal(rate); } prod["totalRateStr"] = utils.toLocaleStr(rate, 2, lang) + ' ' + utils.getProdUnit(unit); rptDS.products.push(prod); } } if (hasData) { // Use custom weather info if (job.useCustWI && job.weatherInfo) { 
rptDS.apps[0].windSpd = `${job.weatherInfo.windSpd} kt`; rptDS.apps[0].windDir = job.weatherInfo.windDir; rptDS.apps[0].temp = `${utils.truncR(job.weatherInfo.temp, 0)} ${(job.measureUnit ? "°F" : "°C")}`; rptDS.apps[0].humid = `${utils.truncR(job.weatherInfo.humid, 0)} %`; } else { // Use aggregated weather info from data const result = await jobUtil.getDataWeatherInfo(sprayData.fileIds); if (result && result.length > 0) { const windDir = (result[0].maxWindDir - result[0].minWindDir) > 22.5 ? utils.deg2Compass(result[0].minWindDir) + ' - ' + utils.deg2Compass(result[0].maxWindDir) : utils.deg2Compass(result[0].minWindDir); const windSpd = result[0].maxWindSpd > result[0].minWindSpd ? utils.toLocaleStr(utils.mpSecToKnot(result[0].minWindSpd), 1, lang) + ' - ' + utils.toLocaleStr(utils.mpSecToKnot(result[0].maxWindSpd), 1, lang) : utils.toLocaleStr(utils.mpSecToKnot(result[0].minWindSpd), 1, lang); rptDS.apps[0].windSpd = `${windSpd} kt`; rptDS.apps[0].windDir = windDir; rptDS.apps[0].temp = utils.inCorF(result[0].avgTemp, job.measureUnit, true); rptDS.apps[0].humid = `${utils.truncR(result[0].avgHumid, 0)} %`; } } } // Write the report datasource file to the temp folder await fs.writeFile(path.join(targetFolder, 'rptDS.json'), JSON.stringify(rptDS, null, 2), 'utf-8'); const sApplicatorId = customer && customer._id.toHexString(); const existed = await fs.pathExists(path.join(env.REPORT_DIR, `app_${sApplicatorId}.mrt`)); const reportId = existed ? `app_${sApplicatorId}` : 'app'; res.json({ rid: reportId, path: genFolder, c: reportId === 'app' ? 0 : 1 // Whether it is a customized report }); /* Notes: BC we want to keep created temporary files for a while so the customer can still view them in the report. 
Thus, they will be deleted by the maintainer app periodically */ }

/**
 * Fetch the saved report variables for the current user and a given report id.
 * Responds with the single matching RptVar document (as a plain object) or null.
 */
async function getRptVars_post(req, res) {
    const input = req.body;
    const vars = await RptVar.find({ $and: [{ user: ObjectId(req.uid) }, { rpt: input.rpt }] });
    // TODO: Check with lean:true ops later
    res.json(!utils.isEmptyArray(vars) ? vars[0].toObject() : null);
}

/**
 * Save report variables for the current user. Replace semantics: any existing
 * set for the given report id is deleted first, then the new vars inserted
 * (delete-then-insert; NOTE(review): not transactional — a failure between the
 * two calls loses the previous vars).
 * @throws {AppParamError} when `rpt` is missing from the request body.
 */
async function setRptVars_post(req, res) {
    const ops = req.body;
    if (!ops['rpt'])
        AppParamError.throw();
    // Drop the user's previous vars for this report before inserting the new set.
    await RptVar.deleteMany({ rpt: ops.rpt, user: ObjectId(req.uid) });
    if (ops.vars) {
        await RptVar.insertMany({ rpt: String(ops.rpt), user: ObjectId(req.uid), vars: ops.vars });
    }
    res.json({ ok: true });
}

/**
 * Persist a (customized) report template (.mrt) into the report directory.
 * NOTE(review): `ops.rid` flows straight into the file path — presumably
 * validated upstream; confirm it cannot contain path separators.
 */
async function saveReport_post(req, res) {
    const ops = req.body;
    await fs.writeFile(path.join(env.REPORT_DIR, ops.rid + '.mrt'), ops.content);
    return res.json({ ok: true });
}

//TODO: Make this as async fnc ???
/**
 * Build the load-sheet report datasource for a job.
 * Validates input, loads the job (client/operator/vehicle/products populated),
 * persists the load options back onto the job, then writes the datasource JSON
 * into the report data folder for the report viewer.
 */
function preLoadReport_post(req, res, next) {
    if (!req.body || !req.body.jobId || !req.body.loadOp || !req.body.loadOp.area)
        return next(AppParamError.create());
    const input = req.body, jobId = input.jobId;
    // Coerce numeric inputs (they may arrive as strings).
    input.loadOp.area = +input.loadOp.area;
    input.loadOp.loads = +input.loadOp.loads;
    let _job, rptDS;
    const lang = input.lang || DEFAULT_LANG;
    async.series([
        function (callback) {
            Job.findById(jobId)
                .populate({ path: 'client', select: '-password' })
                .populate({ path: 'operator', select: '-password' })
                .populate({ path: 'vehicle', select: '-password' })
                .populate('products.product', 'name type rate unit')
                .lean()
                .then(job => {
                    if (!job) {
                        callback(AppError.create(Errors.JOB_NOT_FOUND));
                        return;
                    }
                    _job = job;
                    callback();
                })
                .catch(err => { callback(err); });
        },
        function (callback) {
            // Save report settings to the corresponding job.
Job.findOneAndUpdate({ _id: jobId }, { $set: { "loadOp": Object.assign({}, input.loadOp) } }, { new: true, lean: true }, (err, data) => { if (err) throw err; if (!data) return callback(AppError.create(Errors.JOB_NOT_FOUND)); callback(); }); } ], function (err) { if (err) { return next(err); } const genFolder = uniqid(`loadsheet_${jobId}_`); const targetFolder = path.join(env.REPORT_DIR, 'dat', genFolder); rptDS = makeJobAppDataSource(_job, null, '', lang, input.loadOp); async.series([ function (callback) { moment.locale(lang); rptDS.reports = { date: moment(input.loadOp.date).format("MMM DD, YYYY") }; if (!utils.isEmptyArray(_job.products)) { rptDS.products = []; rptDS.loads = []; rptDS.load_details = []; let numLoads = input.loadOp.loads, totalLoad = _job.appRate * input.loadOp.area, areaPerLoad = (input.loadOp.area / numLoads); if (input.loadOp.loadType != 0 || totalLoad % input.loadOp.capacity == 0 || numLoads == 1) { rptDS.loads.push({ id: 1, loads: `1 - ${numLoads}`, areaPL: areaPerLoad, area: `${utils.toLocaleStr(areaPerLoad, 1, lang)}`, totalLoadStr: `${utils.toLocaleStr((totalLoad / numLoads), 1, lang)} ${utils.getProdUnit(_job.appRateUnit)}` }); } else { areaPerLoad = (input.loadOp.capacity / _job.appRate); rptDS.loads.push({ id: 1, loads: `1 - ${numLoads - 1}`, areaPL: areaPerLoad, area: `${utils.toLocaleStr(areaPerLoad, 1, lang)}`, totalLoadStr: `${utils.toLocaleStr((areaPerLoad * _job.appRate), 1, lang)} ${utils.getProdUnit(_job.appRateUnit)}` }); areaPerLoad = input.loadOp.area - (areaPerLoad * (input.loadOp.loads - 1)); rptDS.loads.push({ id: 2, loads: numLoads.toString(), areaPL: areaPerLoad, area: `${utils.toLocaleStr(areaPerLoad, 1, lang)}`, totalLoadStr: `${utils.toLocaleStr((areaPerLoad * _job.appRate), 1, lang)} ${utils.getProdUnit(_job.appRateUnit)}` }); } // Products let prdRate, totalRate, unit, prod, loadDetail; for (let i = 0; i < _job.products.length; i++) { prdRate = _job.products[i].rate; unit = _job.products[i].unit; totalRate = prdRate 
* input.loadOp.area; prod = { id: _job.products[i].product._id.toHexString(), name: _job.products[i].product.name, type: _job.products[i].product.type, rate: utils.toLocaleStr(prdRate, 2, lang), unit: utils.getProdUnit(unit), totalStr: `${utils.toLocaleStr(prdRate * input.loadOp.area, 2, lang)} ${utils.getProdUnit(unit)}` }; if (unit === Units.OZ) { prod.totalStr += '\n' + `${utils.toLocaleStr(utils.ozToGal(totalRate), 2, lang)} gal`; } rptDS.products.push(prod); // Load Details for (let j = 0; j < rptDS.loads.length; j++) { const load = rptDS.loads[j]; loadDetail = { loadId: load.id, prodId: prod.id, totalProdStr: `${utils.toLocaleStr((load.areaPL * prdRate), 1, lang)} ${prod.unit}` }; if (unit === Units.OZ) { loadDetail.totalProdStr += '\n' + `${utils.toLocaleStr(utils.ozToGal((load.areaPL * prdRate)), 1, lang)} gal`; } rptDS.load_details.push(loadDetail); } } } callback(); }, function (callback) { fs.ensureDir(targetFolder, err => { if (err) return callback(err); callback(); }) }, function (callback) { // Write the report datasource file to the temp folder fs.writeFile(path.join(targetFolder, 'rptDS.json'), JSON.stringify(rptDS, null, 2), 'utf-8', function (err) { if (err) return callback(err); callback(); }); } ], function (err) { if (err) { debug(err); return next(err); } res.json({ rid: 'loadsheet', path: genFolder, c: 0 }); // Temporary files to be deleted in the maintenainer app periodically }); }); } function makeJobAppDataSource(job, applicator, genImgPath, lang, loadOp) { const appDS = { reports: { type: 0 }, clients: [{ // The applicator is a client of its customers in report model. TODO: Refactor this to a better name later. Will require to update report templates too. 
id: applicator && applicator._id.toHexString(), name: utils.getField(applicator, "name"), address: getFormattedAddress(applicator), }], customers: [{ id: job.client._id.toHexString(), name: job.client.name || '', address: getFormattedAddress(job.client), contact: job.client.contact || '', phone: job.client.phone || '' }], pilots: [ { id: utils.getField(job.operator, "_id", "0"), name: utils.getField(job.operator, "name"), address: getFormattedAddress(job.operator), }], products: [{ // default empty product id: "0", jobId: job._id, name: "", epaReg: "", restricted: null, type: 1, rate: 0, unit: "", rateStr: "", totalRateStr: "" }], vehicles: [{ id: utils.getField(job.vehicle, "_id", "0"), name: utils.getField(job.vehicle, "name"), model: utils.getField(job.vehicle, "model") }], apps: [] }; // Add country information for all entities in the report data if (applicator) { appDS.clients[0].country = getDocumentCountry(applicator, true); } if (job.client) { appDS.customers[0].country = getDocumentCountry(job.client, true); } if (job.operator) { appDS.pilots[0].country = getDocumentCountry(job.operator, true); } appDS["jobs"] = [{ id: job._id, orderNum: job.orderNumber || "", clientId: applicator && applicator._id, vehicleId: job.vehicle ? job.vehicle._id.toHexString() : "0", customerId: job.client ? job.client._id.toHexString() : "0", pilotId: job.operator ? job.operator._id.toHexString() : "0", name: job.name || '', appRate: `${utils.toLocaleStr(job.appRate, 2, lang)} ${utils.rateUnitString(job.appRateUnit, true)}`, flight: job.flightNumber || '', farm: job.farm || '', crop: ((job.crop && job.crop['_id']) ? 
job.crop['name'] : job.crop) || '', remark: job.remark || '', measureUnit: job.measureUnit, mapfile: genImgPath, sysPsi: job.sysPsi, appType: job.appType, missionTime: job.missionTime }]; if (loadOp) { appDS.jobs[0].areaSize = loadOp.area; appDS.jobs[0].capacity = loadOp.capacity; appDS.jobs[0].loadType = loadOp.loadType; appDS.jobs[0].totalLoads = loadOp.loads; } return appDS; } // function updatePolyCenter(geoPolys) { // if (geoPolys === undefined || utils.isEmptyArray(geoPolys)) // return false; // try { // let geoP; // for (let i = 0; i < geoPolys.length; i++) { // geoP = polylabel(geoPolys[i].geometry.coordinates, 1.0); // if (geoP) // geoPolys[i].properties["center"] = [geoP[1], geoP[0]]; // } // } catch (error) { // debug('updatePolyCenter()', error); // return false; // } // } async function setPolyWCenter(item) { return new Promise(resolve => { try { const itemWPL = polylabel(item.geometry.coordinates, 1.0); if (itemWPL) item.properties["center"] = [itemWPL[1], itemWPL[0]]; } catch (error) { debug(error); } finally { resolve(item); } }); } async function setPolysWCenter(geoPolys) { return Promise.all(geoPolys.map(async item => await setPolyWCenter(item))); } async function getUploadedFiles_post(req, res) { const jobId = req.body.jobId; const job = await Job.findById(jobId, { '_id': 1 }); if (!job) AppError.throw(Errors.JOB_NOT_FOUND); const apps = await App.find({ 'jobId': job._id, 'status': 3, markedDelete: { $ne: true } }) .sort('-updateDate') .select('_id fileName savedFilename fileSize status proStatus totalSprayed createdDate') .lean(); return res.json(apps || []); } async function importStatus_post(req, res) { const appId = req.body.appId; if (!utils.isObjectId(appId)) AppParamError.throw(); const app = await App.findById(ObjectId(appId)) .select('_id fileName savedFilename fileSize status proStatus errorMsg warnMsg markedDelete') .lean(); res.json(app); } async function importingStatus_post(req, res) { let filterOps = { markedDelete: { $ne: true } 
}; // ✅ Exclude deleted applications if (req.body.jobId) { filterOps.jobId = req.body.jobId; filterOps.$or = [{ status: 1 }, { status: 2 }]; } else { if (!utils.isEmptyArray(req.body.appIds)) { const appIds = req.body.appIds.map(it => ObjectId(it)); filterOps._id = { $in: appIds }; } filterOps['byImport'] = true; } const apps = await App.aggregate([ { $match: filterOps }, { $lookup: { from: "jobs", let: { jobId: "$jobId" }, pipeline: [ { $match: { $expr: { $and: [ { $eq: ["$_id", "$$jobId"] }, { $eq: ["$byPuid", ObjectId(req.userInfo.puid)] } // ✅ Security: Only jobs owned by this applicator ] } } } ], as: "appjob" } }, { $unwind: { path: "$appjob" } }, // ✅ Remove preserveNullAndEmptyArrays - only keep apps with matching jobs { $lookup: { from: "users", localField: "appjob.client", foreignField: "_id", as: "jobclient" } }, { $unwind: { path: "$jobclient", "preserveNullAndEmptyArrays": true } }, { $project: { fileName: 1, fileSize: 1, status: 1, proStatus: 1, errorMsg: 1, jobId: 1, createdDate: 1, cid: '$appjob.client', cname: '$jobclient.name' } }, { $sort: { createdDate: - 1 } }, { $limit: 500 } ]); res.json(apps || []).end(); } async function deleteAppFile_post(req, res) { const _appId = req.body.appId; await jobUtil.deleteAppById(_appId, true); res.json({ appId: _appId }); } async function getJobLogs_post(req, res) { const jobId = req.body.jobId; const type = req.body.type || 2; const logs = await JobLog.find({ job: jobId, type: type }, 'date user') .limit(30) .sort('-date') .populate('user', '-_id username tailNumber name') .lean(); const result = (logs || []).map(log => ({ ...log, user: log.user ? { username: log.user.username || log.user.tailNumber || log.user.name } : null })); res.json(result); } /** * Assign a job to one or more users (aircraft/partners's aircraft) * * This function handles the core job assignment workflow for the AgMission platform. * It supports both internal users (aircraft) and external partner system users * (SatLoc, AgIDronex, etc.) 
with intelligent sync management. * * @route POST /api/jobs/assign * @param {Object} req.body - Assignment request data * @param {string} req.body.jobId - ID of the job to assign * @param {Object} req.body.dlOp - Download operation configuration * @param {number} req.body.dlOp.type - Download file format type ('0'=Map only, '1'=Ag-NAV, '2'=Ag-NAV Project, '3'=ESRI Shapefile, '4'=FAAA Obstacle) * @param {Array} req.body.asUsers - Array of users to assign the job to * @param {string} req.body.asUsers[].uid - User ID (internal user or partner system user) * @param {string} [req.body.asUsers[].notes] - Assignment-specific notes * @param {string} [req.body.asUsers[].jobName] - Partner-specific job name * @param {Array} [req.body.avUsers] - Array of un-assigned/available users (to remove from assignment) * @param {string} req.body.avUsers[].uid - User ID * * @description * **Assignment Workflow:** * 1. **Validation**: Validates job exists and request parameters * 2. **Download Options Update**: Updates job download configuration if changed * 3. **Assignment Cleanup**: Removes existing pending assignments and specified (unassigned) users * 4. **Duplicate Prevention**: Checks for already completed assignments to prevent duplicates * 5. **New Assignment Creation**: Creates new JobAssign records for specified users * 6. 
**Partner Integration**: For partner system users, handles intelligent sync: * - **Immediate Upload**: If partner API is live, attempts immediate job upload * - **Queue Fallback**: If API unavailable or upload fails, queues task for later * - **Status Tracking**: Updates assignment status based on upload success * - **Data Sync Scheduling**: Schedules automatic data retrieval tasks * * **Assignment Types:** * - **Internal Users**: Direct assignments to platform users (aircraft) * - **Partner System Users**: Assignments that sync with external partner systems * * **Status Management:** * - NEW (0): Assignment created, pending processing * - UPLOADED (2): Successfully uploaded to partner system * - PROCESSING: Data being processed from partner * - COMPLETED: Assignment (to partner system) fully processed * * **Error Handling:** * - Graceful degradation: Partner upload failures don't break assignment creation * - Automatic retry: Failed uploads are queued for later retry * - Status tracking: All states and errors are logged for monitoring * * @returns {Object} Response object * @returns {boolean} returns.ok - Success indicator * * @throws {AppParamError} When required parameters are missing * @throws {AppError} When job is not found (JOB_NOT_FOUND) * * @example * // Assign job to internal user * POST /api/jobs/assign * { * "jobId": "60a1b2c3d4e5f6789abcdef0", * "dlOp": { "type": 1 }, * "asUsers": [ * { "uid": "pilot_user_id" } * ] * } * * @example * // Assign job to internal user with partner integration metadata * POST /api/jobs/assign * { * "jobId": "60a1b2c3d4e5f6789abcdef0", * "dlOp": { "type": 1 }, * "asUsers": [ * { * "uid": "internal_user_id", * "notes": "High priority spray mission", * "jobName": "Field_A_Spray_2025" * } * ] * } * * @example * // Remove users and assign new ones * POST /api/jobs/assign * { * "jobId": "60a1b2c3d4e5f6789abcdef0", * "dlOp": { "type": 1 }, * "avUsers": [ * { "uid": "ac_user_id" } * ], * "asUsers": [ * { "uid": "ac_user_id" } * ] * 
} */ async function assign_post(req, res) { // Extract and validate request parameters const assignParams = req.body; if (!assignParams || !assignParams.jobId || !assignParams.dlOp || !assignParams.asUsers) { AppParamError.throw(); } // Track user IDs for assignment management let completedUserIds = []; let availableUserIds = []; // Verify the job exists and get current download options const job = await Job.findById(assignParams.jobId).select('dlOp'); if (!job) AppError.throw(Errors.JOB_NOT_FOUND); // Process users to remove from assignment (avUsers = "available users" to remove) if (!utils.isEmptyArray(assignParams.avUsers)) { for (const userToRemove of assignParams.avUsers) { if (ObjectId.isValid(userToRemove.uid)) { availableUserIds.push(ObjectId(userToRemove.uid)); } } } // Delete existing assignments with enhanced transaction await enhancedRunInTransaction(async (session) => { // Update job download options if they've changed if (assignParams.dlOp.type !== job.dlOp.type) { await Job.updateOne({ _id: assignParams.jobId }, { $set: { "dlOp.type": assignParams.dlOp.type } }, { session }); } // Delete existing assignments: either specific (unassigned in the case of reassignment) users or all pending assignments if (availableUserIds.length) { // Remove specific users OR all pending assignments for this job await JobAssign.deleteMany({ $or: [ { job: assignParams.jobId, status: AssignStatus.NEW }, // All pending { job: assignParams.jobId, user: { $in: availableUserIds } } // Specific (assigned to unassigned/ available) users ] }, { session }); } else { // Remove all pending new assignments if no specific users provided. TODO: Consider if this is desired behavior??? 
await JobAssign.deleteMany({ job: assignParams.jobId, status: AssignStatus.NEW }, { session }); } }); // Check for already completed assignments (prevent duplicates) let completedAssignments; if (!utils.isEmptyArray(assignParams.asUsers)) { // Extract user IDs from assignment requests const newAssignUserIds = []; for (const userAssign of assignParams.asUsers) { if (ObjectId.isValid(userAssign.uid)) { newAssignUserIds.push(ObjectId(userAssign.uid)); } } // Find current completed assignments (status > 0 (NEW)) for the job completedAssignments = await JobAssign.find({ job: assignParams.jobId, user: { $in: newAssignUserIds }, status: { $gt: AssignStatus.NEW } // Only completed/in-progress assignments }, 'user').lean(); } // Create new assignments for users who don't already have completed ones if (!utils.isEmptyArray(assignParams.asUsers)) { // Build list of users who already have completed assignments if (!utils.isEmptyArray(completedAssignments)) { completedUserIds = completedAssignments.map(assignment => assignment.user); } // Prepare new assignment records const newAssignRecords = []; for (const userAssign of assignParams.asUsers) { // Skip users who already have completed assignments if (!utils.objectIdIn(completedUserIds, userAssign.uid)) { // Get partner service and generate external job ID const user = await Vehicle.findById(userAssign.uid).populate('partnerInfo').lean(); let extJobId = assignParams.jobId; if (user && user.partnerInfo && user.partnerInfo.partnerCode) { const partnerServiceFactory = require('../services/partner_service_factory'); const partnerService = partnerServiceFactory.getService(user.partnerInfo.partnerCode); const systemType = user.partnerInfo.systemType || SystemTypes.NONE; extJobId = partnerService.generateJobId(job, systemType); } // Create base assignment record const assignData = { user: userAssign.uid, // Works for both internal users and partner system users job: assignParams.jobId, status: AssignStatus.NEW, extJobId: extJobId, // 
Set the generated external job ID }; // Add optional partner-specific metadata fields if (userAssign.notes) { assignData.notes = userAssign.notes; } newAssignRecords.push(assignData); } } // Insert new assignments and handle partner integration with transaction if (newAssignRecords.length) { // Use enhanced transaction for assignment creation let insertResult = await enhancedRunInTransaction(async (session) => { // Insert new assignments within transaction const result = await JobAssign.insertMany(newAssignRecords, { session }); return result; }); // Handle post-transaction partner operations (outside transaction to avoid long-running operations) if (insertResult && insertResult.length) { try { // Get inserted assignment IDs for async partner operations const insertedIds = insertResult.map(assign => assign._id); // Populate assignments again for async operations (outside transaction) const assignsWithDetails = await JobAssign.findByIdWithPartnerInfo(insertedIds); // Handle async partner operations after transaction for (const assignment of assignsWithDetails) { if (assignment.hasPartnerIntegration()) { // User (vehicle) has partner integration setImmediate(() => processPartnerAssignment(assignment)); } } } catch (partnerError) { // Log partner processing errors but don't fail the entire assignment operation debug('Error processing partner assignments:', partnerError); } } } } res.json({ ok: true }); } /** * Process partner assignment - handle partner integration for assignments * @param {object} assignment - Job assignment with populated user (vehicle), job, and user.partnerInfo */ async function processPartnerAssignment(assignment) { if (!assignment || !assignment.user || !assignment.job || !assignment.hasPartnerIntegration()) { debug('Invalid assignment data for partner processing:', assignment); return; } try { const partnerCode = assignment.getPartnerCode(); const customerId = assignment.user.parent || assignment.user._id; // Skip if no partner code if 
(!partnerCode) { debug(`No partner code found for user ${assignment.user._id}`); return; } // Check if partner service is available and API is live const isPartnerLive = await partnerSyncService.checkPartnerAPIHealth(partnerCode); if (isPartnerLive) { // Try immediate upload with atomic transaction try { // throw new Error('Immediate upload failed'); // Test only await runWithSessionOrTransaction(async (session) => { // Upload job to partner with session for atomic operations // This handles assignment status update and job logging internally const result = await partnerSyncService.uploadJobToPartner(assignment._id.toString(), { session }); if (result.success) { debug(`Successfully uploaded job to ${partnerCode} for assignment ${assignment._id}`); } else { throw new Error(`Upload failed: ${result.message || 'Unknown error'}`); } }); return; } catch (uploadError) { debug(`Immediate upload failed for ${partnerCode}:`, uploadError); // Fall through to queuing } } // API not live or upload failed - queue the task try { await taskQHelper.addTaskASync(PartnerTasks.UPLOAD_PARTNER_JOB, { assignId: assignment._id.toString(), jobId: assignment.job._id.toString(), partnerCode: partnerCode, customerId: customerId, partnerAircraftId: assignment.getPartnerAircraftId() }); debug(`Queued job upload task for ${partnerCode} assignment ${assignment._id}`); } catch (queueError) { debug(`Failed to queue upload task for ${partnerCode}:`, queueError); // Don't throw here as this is a background task - log the error } } catch (error) { debug('Error processing partner assignment:', error); // Note: No fallback queuing since we should have all required data from vehicles } } async function assignments_post(req, res) { const _jobId = Number(req.body.jobId); if (!_jobId) AppParamError.throw(Errors.JOB_NOT_FOUND); let _assignedUserIds; const assigment = { avUsers: [], asUsers: [] }; const job = await Job.findById(_jobId, '_id byPuid', { lean: true }); if (!job) 
AppError.throw(Errors.JOB_NOT_FOUND); const assignedUsers = await JobAssign.find({ job: _jobId }, '-_id user', { lean: true }); if (!utils.isEmptyArray(assignedUsers)) _assignedUserIds = assignedUsers.map(it => it?.user); const _availableUsers = await Vehicle.aggregate([ { $match: { markedDelete: { $ne: true }, parent: job.byPuid, $or: [ // Match vehicles with username { username: { $nin: [null, ''] } }, // Match vehicles with partner integration setup { $and: [ { 'partnerInfo.partner': { $ne: null } }, { 'partnerInfo.partnerAircraftId': { $nin: [null, ''] } } ] } ] } }, // Lookup assignment status for each vehicle/user { $lookup: { from: 'job_assigns', let: { vehicleId: '$_id' }, pipeline: [ { $match: { $expr: { $and: [ { $eq: ['$user', '$$vehicleId'] }, { $eq: ['$job', _jobId] } ] } } } ], as: 'assignments' } }, // Lookup partner information from users collection { $lookup: { from: 'users', let: { partnerId: '$partnerInfo.partner' }, pipeline: [ { $match: { $expr: { $and: [ { $eq: ['$_id', '$$partnerId'] }, { $eq: ['$kind', UserTypes.PARTNER] } ] } } }, { $project: { name: 1, partnerCode: 1 } } ], as: 'partnerDoc' } }, { $project: { _id: 0, uid: "$_id", name: 1, username: 1, active: 1, pkgActive: 1, tailNumber: { $ifNull: ['$tailNumber', ''] }, assignStatus: { $ifNull: [ { $arrayElemAt: ['$assignments.status', 0] }, null ] }, partnerInfo: { $cond: { if: { $and: [ { $ne: ['$partnerInfo.partner', null] }, { $gt: [{ $size: '$partnerDoc' }, 0] } ] }, then: { name: { $arrayElemAt: ['$partnerDoc.name', 0] }, partnerCode: { $arrayElemAt: ['$partnerDoc.partnerCode', 0] } }, else: '$$REMOVE' } } } }, ]); let avUsers = [], asUsers = []; // Available and Assigned Users if (!utils.isEmptyArray(_assignedUserIds)) { for (let i in _availableUsers) { if (utils.objectIdIn(_assignedUserIds, _availableUsers[i].uid)) { // For assigned users, keep the assignStatus and partnerInfo (if exists) asUsers.push(_availableUsers[i]); } else if (_availableUsers[i].active === true) { // For 
available users, remove assignStatus but keep partnerInfo (if exists) const { assignStatus, ...avUser } = _availableUsers[i]; avUsers.push(avUser); } } } else if (!utils.isEmptyArray(_availableUsers)) { // For available users, remove assignStatus from all but keep partnerInfo (if exists) avUsers = _availableUsers.map(user => { const { assignStatus, ...avUser } = user; return avUser; }); } if (!utils.isEmptyArray(avUsers)) assigment.avUsers = avUsers; if (!utils.isEmptyArray(asUsers)) assigment.asUsers = asUsers; res.json(assigment); } async function countByClient_post(req, res) { const clientId = req.body.clientId; if (!utils.isObjectId(clientId)) AppParamError.throw(); const count = await Job.countDocuments({ client: ObjectId(clientId) }); res.json(count); // Notes: Can not use res.json(number) or express error and the response will fail. } async function saveMapOps_post(req, res) { const mapOps = req.body.mapOps; if (!req.body.jobId || !mapOps || !mapOps.width || !mapOps.height || !mapOps.center || !mapOps.zoom) AppParamError.throw(); const data = await Job.findOneAndUpdate({ _id: req.body.jobId }, { $set: { "dlOp.mapOp": mapOps } }, { new: true, lean: true }); return res.json(data ? 
data.dlOp.mapOp : null); // Ingore job_not_found } async function searchJobs_post(req, res) { // { byPuid: this.authSvc.byPUserId, clientId: this.fClient?.value, nameId: this.fNameId?.trim(), status: this.fStatus?.value } const params = req.body; // console.log('params:', params); if (!params || !params.byPuid) AppParamError.throw(); const filter = { markedDelete: { $ne: true }, byPuid: ObjectId(params.byPuid) }; if (params.clientId && ObjectId.isValid(params.clientId)) filter['client'] = ObjectId(params.clientId); if (params.nameId) { if (!utils.isNumber(Number(params.nameId))) filter['name'] = { $regex: new RegExp(`${params.nameId}`, 'i') }; else filter['_id'] = Number(params.nameId); } if (utils.isNumber(params.status)) filter['status'] = params.status; const jobs = await Job.find(filter, '_id name sprayAreas excludedAreas measureUnit appRate appRateUnit status invoiceStatus').sort('_id'); res.json(jobs); } /** * Normalize application file metadata to standardized format * Handles both old format (pre-normalization) and new format metadata * * @param {Object} meta - Raw metadata from application file * @param {string} fileName - File name to help determine data source type * @param {Object} jobContext - Optional job context for fallback values * @param {number} jobContext.appRateUnit - Job's application rate unit (RateUnits enum) * @param {boolean} jobContext.measureUnit - Job's measure unit (false=US, true=Metric) * @returns {Object} Normalized metadata with standard fields */ function normalizeAppFileMeta(meta, fileName = '', jobContext = null) { if (!meta) return null; // If already normalized (has type or matType field), return as-is if (meta.type || meta.matType) { return meta; } const { FCTypes, DataTypes, MatTypes, RateUnits } = require('../helpers/constants'); // Create normalized metadata - keep ALL original fields const normalized = { ...meta }; // Determine source type if not present if (!normalized.type) { const lowerFileName = 
fileName.toLowerCase(); // Infer from file extension if (lowerFileName.endsWith('.log')) { normalized.type = DataTypes.SATLOC; } else if (lowerFileName.endsWith('.dbf') || /\.t\d*$/.test(lowerFileName)) { // .dbf or .t* (e.g., .t01, .t12, .t) normalized.type = DataTypes.AGNAV; } else { // Fallback: Infer from metadata structure if (meta.fcType !== undefined && typeof meta.fcType === 'number') { // SatLoc has numeric fcType (FCTypes enum) normalized.type = DataTypes.SATLOC; } else if (meta.fcType !== undefined && typeof meta.fcType === 'string') { // AgNav has string fcType (e.g., "Ag-Granular", "Satloc Constant Rate") normalized.type = DataTypes.AGNAV; } else if (meta.pilotName || meta.aircraftId) { // SatLoc-specific fields normalized.type = DataTypes.SATLOC; } else if (meta.operator || meta.client || meta.appRateUnitStr) { // AgNav-specific fields normalized.type = DataTypes.AGNAV; } } } // Normalize based on source type if (normalized.type === DataTypes.SATLOC) { // SatLoc normalization // matType: 'wet' for liquid, 'dry' for dry material if (meta.fcType !== undefined && typeof meta.fcType === 'number') { normalized.matType = meta.fcType === FCTypes.DRY ? MatTypes.DRY : MatTypes.WET; } // operator: pilot name from System Setup record (Type 100) if (meta.pilotName && !normalized.operator) { normalized.operator = meta.pilotName; } // fcName: flow controller name from Controller Type record (Type 46) // Already present in metadata as fcName } else if (normalized.type === DataTypes.AGNAV) { // AgNav normalization // matType: derive from appRateUnitStr, fcType string, or job's appRateUnit let matType = null; // Priority 1: infer from appRateUnitStr (most reliable) if (meta.appRateUnitStr) { const unitStr = meta.appRateUnitStr.toLowerCase(); // Liquid units: gal/ac, l/ha, oz/ac // Dry units: lbs/ac, kg/ha matType = (unitStr.includes('gal') || unitStr.includes('l/') || unitStr.includes('oz')) ? 
MatTypes.WET : MatTypes.DRY; } // Priority 2: infer from fcType string if appRateUnitStr not available if (!matType && meta.fcType) { const utils = require('../helpers/utils'); const matTypeFromFCType = utils.matTypeFromFCType(meta.fcType); if (matTypeFromFCType !== 'none') { matType = matTypeFromFCType === 'dry' ? MatTypes.DRY : MatTypes.WET; } } // Priority 3: fallback to job's appRateUnit if provided if (!matType && jobContext && jobContext.appRateUnit !== undefined) { // Dry units: LBS_PER_ACRE, KG_PER_HA // Wet units: OZ_PER_ACRE, GAL_PER_ACRE, LIT_PER_HA matType = (jobContext.appRateUnit === RateUnits.LBS_PER_ACRE || jobContext.appRateUnit === RateUnits.KG_PER_HA) ? MatTypes.DRY : MatTypes.WET; } normalized.matType = matType; // operator: already in meta.operator for AgNav // fcName: use fcType as flow controller name for AgNav if (meta.fcType && !normalized.fcName) { normalized.fcName = meta.fcType; } } return normalized; } /** * Get list of uploaded files given the jobId with normalized metadata * @route POST /api/jobs/appFiles * @param {Object} req.body * @param {string} req.body.jobId - Job ID * @returns {Array} List of application files with normalized metadata */ async function appFiles_post(req, res) { const params = req.body; if (!params || !params.jobId) AppParamError.throw(); // Fetch job details for normalization context const job = await Job.findById(params.jobId, 'appRateUnit measureUnit').lean(); const jobContext = job ? 
{ appRateUnit: job.appRateUnit, measureUnit: job.measureUnit } : null; const jobs = await App.aggregate([ { $match: { jobId: params.jobId, markedDelete: { $ne: true } } }, { $lookup: { from: "appfiles", localField: "_id", foreignField: "appId", as: "appfiles" } }, { $unwind: { path: '$appfiles' } }, { $project: { fid: '$appfiles._id', name: '$appfiles.name', agn: '$appfiles.agn', meta: '$appfiles.meta' } }, { $sort: { agn: 1 } }]); // Normalize metadata for each file with job context const normalizedFiles = jobs.map(file => ({ ...file, meta: normalizeAppFileMeta(file.meta, file.name, jobContext) })); res.json(normalizedFiles); } /** * Get spray data records given the fileIds list */ async function filesdata_post(req, res) { const params = req.body; if (!params || !params.fileId) AppParamError.throw(); // Validate cursor pagination parameters const validation = validateCursorParams(params); if (!validation.valid) { return res.status(400).json({ error: validation.error }); } // Use cursor-based pagination helper (Stripe API style) // Efficient for large datasets - uses _id index instead of skip() const result = await paginateWithCursor( AppDetail, params, { fileId: params.fileId }, // Base filter { cursorField: '_id' } ); res.json(result); } async function fetchInvReadyJobs_post(req, res) { /* params { excludeIds:[], currency: <3-char ISO currency> } */ const input = req.body; assert(input, AppParamError.create()); const puid = req.userInfo?.puid; if (!puid || !ObjectId.isValid(puid)) AppParamError.throw(Errors.INVALID_PUID); const filter = { ...(!utils.isEmptyArray(input.excludeIds) && { _id: { $nin: input.excludeIds } }), markedDelete: { $ne: true }, invoiceStatus: { $in: [null, JobInvoiceStatus.NONE] }, status: { $gte: JobStatus.READY }, status: { $ne: 9 }, costings: { $ne: null }, "costings.billableAmount": { $gt: 0 }, ...(input.currency && { "costings.currency": input.currency }), byPuid: ObjectId(puid) }; const jobs = await Job.find(filter, '_id name 
measureUnit status costings invoiceStatus').populate('client', '_id name'); res.json(jobs); } return { getJobs_get, createJob_post, getJob_get, updateJob_put, deleteJob, getData_post, getReportOps_get, preAppReport_post, getRptVars_post, setRptVars_post, saveReport_post, preLoadReport_post, getUploadedFiles_post, importStatus_post, importingStatus_post, deleteAppFile_post, getJobLogs_post, assign_post, assignments_post, countByClient_post, saveMapOps_post, searchJobs_post, appFiles_post, filesdata_post, getAppDataByJobId, fetchInvReadyJobs_post } }