// file_satlog: read/write SatLoc .JOB area files and parse SatLoc CSV logs.
const
|
|
fs = require('fs-extra'),
|
|
path = require('path'),
|
|
utils = require('./utils'),
|
|
polyUtil = require('./poly_util'),
|
|
FILE = require('./file_constants'),
|
|
{ SystemTypes } = require('./constants'),
|
|
debug = require('debug')('agm:file_satlog');
|
|
|
|
// Record markers found in a SatLoc .JOB file.
const POL = '.POL';      // polygon header record
const PAT = '.PAT';      // pattern header record
const INC = 'INC';       // inclusive polygon marker (spray area)
const EXC = 'EXC';       // exclusive polygon marker (no-spray / XCL area)
const SWIDTH = 'SWIDTH'; // swath-width record

// Constants for file formatting
const CRLF = '\r\n'; // Windows line ending as per JOB format specification
|
|
|
|
/**
 * Safely fetch and trim the token at `pos` from a split-line token array.
 * @param {string[]} val - tokens produced by splitting a line
 * @param {number} pos - token index
 * @returns {string} the trimmed token, or '' when absent or falsy
 */
function getItem(val, pos) {
    const token = val[pos];
    return token ? token.trim() : '';
}
|
|
|
|
/**
 * Read a SatLoc .JOB area file and convert its polygons into Agmission areas.
 * Lines starting with .POL/.PAT open a polygon; INC/EXC mark it as spray or
 * excluded; coordinate lines are "lat lon" pairs; a SWIDTH record carries the
 * swath width and (optionally) its unit.
 * @param {string} filePath - directory containing the area file
 * @param {Object} areaTypeAndFiles - descriptor; `.area` is the file name
 * @param {Object} ops - options (sprZoneColor, appRate, crop) passed to polyUtil
 * @param {Function} [cb] - optional node-style callback; when omitted the
 *   returned promise resolves with the job item
 * @returns {Promise<Object|undefined>} jobItem { sprayAreas, xclAreas, meta }
 */
function readSatLogJob(filePath, areaTypeAndFiles, ops, cb) {
    let swath = 0;
    let isMetric = false;
    let newPolygon = false;
    let sprayPoly = false;
    let cornerSets = [];
    let sprayAreas = [];
    let xclAreas = [];
    const bareFilename = utils.normalizeName(utils.bareFilename(areaTypeAndFiles.area));

    // Convert the corners collected so far into areas and append them to the
    // matching list, then reset the corner buffer.
    function flushPolygon() {
        if (cornerSets.length >= 3) { // a polygon needs at least 3 corners
            if (sprayPoly) {
                const areas = polyUtil.createAreas(0, cornerSets, bareFilename, ops.sprZoneColor, ops.appRate, ops.crop);
                if (sprayAreas.length < FILE.MAX_ITEM && !utils.isEmptyArray(areas))
                    sprayAreas = sprayAreas.concat(areas);
            }
            else {
                const areas = polyUtil.createAreas(1, cornerSets, 'XCL');
                if (xclAreas.length < FILE.MAX_ITEM && !utils.isEmptyArray(areas))
                    xclAreas = xclAreas.concat(areas);
            }
        }
        cornerSets = [];
    }

    return fs.readFile(path.join(filePath, areaTypeAndFiles.area))
        .then((data) => {
            const lines = data.toString().split('\n');

            for (const rawLine of lines) {
                if (sprayAreas.length >= FILE.MAX_ITEM || xclAreas.length >= FILE.MAX_ITEM) break;

                const fields = rawLine.trim().split(/\s+/);
                const firstCol = getItem(fields, 0);

                if (!firstCol.length) continue;

                if (firstCol === POL || firstCol === PAT) {
                    // A new polygon/pattern header closes the previous polygon.
                    newPolygon = (firstCol === POL);
                    flushPolygon();
                    continue;
                }

                if (newPolygon) {
                    if (firstCol === INC || firstCol === EXC) {
                        sprayPoly = (firstCol === INC);
                    }
                    else if (fields.length === 2) {
                        const lat = parseFloat(getItem(fields, 0));
                        const lon = parseFloat(getItem(fields, 1));
                        if (utils.isNumber(lat) && utils.isNumber(lon)) {
                            cornerSets.push([lon, lat]); // Revert coor order in GeoJSON
                        }
                    }
                }
                else if (firstCol === SWIDTH && fields.length > 1) {
                    const valueF = parseFloat(getItem(fields, 1));
                    if (utils.isNumber(valueF))
                        swath = valueF;
                    if (fields.length >= 3) {
                        // BUGFIX: the unit token is the 3rd field (index 2). The
                        // original read index 3, which the `>= 3` length guard
                        // never guarantees, so 3-field SWIDTH lines always came
                        // out as non-metric.
                        isMetric = getItem(fields, 2).toUpperCase() === 'M';
                    }
                }
            }

            // BUGFIX: flush the trailing polygon unconditionally. The original
            // only flushed when the *last* line was non-empty (a trailing blank
            // line skipped the flush), and even then dropped the coordinate on
            // that last line.
            flushPolygon();

            const jobItem = {
                sprayAreas,
                xclAreas,
                meta: { measureUnit: isMetric, swath }
            };

            if (!utils.isEmptyArray(xclAreas)) {
                jobItem.xclAreas = polyUtil.processXclsName(jobItem.sprayAreas, jobItem.xclAreas);
            }

            if (cb)
                cb(null, jobItem);
            else
                return jobItem;
        })
        .catch(err => {
            if (cb)
                return cb(err);
            else throw err;
        });
}
|
|
|
|
/**
 * Create a SatLoc job from Agmission spray areas and/or excluded areas
 * @param {Object} job - Job object containing name, sprayAreas, and excludedAreas, waypoints, etc.
 * @param {string} systemType - Type of the system (use SystemTypes constants)
 * @returns {string|null} Content of the satloc job file, or null when there is nothing to write
 */
function createSatLocJob(job, systemType = SystemTypes.G4) {
    // Required lazily (inside the function) rather than at module top level —
    // presumably to avoid a circular dependency; TODO confirm.
    const jobUtil = require('./job_util');
    const { _id, name, sprayAreas } = job;

    // Process buffers to excluded areas if they exist
    const excludedAreas = jobUtil.processBuffersToXclAreas(job);

    if (utils.isEmptyArray(sprayAreas) && utils.isEmptyArray(excludedAreas)) {
        return null;
    }

    const jobNumber = _id;

    // BUGFIX: generateInternalJobName takes (systemType, jobNumber). The old
    // call passed (name, systemType, jobNumber), so the internal job name was
    // built from the system type instead of the job number.
    const internalJobName = generateInternalJobName(systemType, jobNumber);

    let jobFileContent = `.JOB ${internalJobName} ${internalJobName}${CRLF}.VERSION 2${CRLF}`;

    // Add job name if provided
    if (name) {
        jobFileContent += `.JNM ${internalJobName}${CRLF}`;
        jobFileContent += `.LLB ${internalJobName}${CRLF}`;
    }

    function getAreaName(area) {
        // Extract area name from properties, fallback to default naming
        return area.properties?.name || area.properties?.label || '';
    }

    // Append one `.POL` section per area of the given type ('INC' or 'EXC').
    function processAreas(areas, type) {
        if (utils.isEmptyArray(areas)) return;

        for (let index = 0; index < areas.length; index++) {
            const area = areas[index];
            if (!area.geometry || !area.geometry.coordinates || area.geometry.coordinates.length === 0) {
                debug(`Area ${index + 1} is missing geometry or coordinates`);
                continue;
            }

            const polId = index + 1; // Polygon ID is index + 1
            const areaName = getAreaName(area);
            // BUGFIX: copy the outer ring so pop()/reverse() below do not
            // mutate the caller-owned GeoJSON geometry.
            let coordinates = area.geometry.coordinates[0].slice();

            // Remove duplicate last coordinate if it matches first (JOB format requirement)
            if (coordinates.length > 1
                && coordinates[0][0] === coordinates[coordinates.length - 1][0] && coordinates[0][1] === coordinates[coordinates.length - 1][1]) {
                coordinates.pop();
            }

            // Reverse coordinate order for exclusive areas (counterclockwise)
            if (type === 'EXC') {
                coordinates = coordinates.reverse();
            }

            // Add polygon header with polygon ID and area name if exists
            if (areaName) {
                jobFileContent += `.POL ${polId}\t${areaName}${CRLF}`;
            } else {
                jobFileContent += `.POL ${polId} ${polId}${CRLF}`;
            }

            jobFileContent += `\t${type}${CRLF}`; // TAB indent for type

            // Add RGB color information (optional but common)
            if (type === 'INC') {
                jobFileContent += `\tRGB: 204,000,000, 0, 1${CRLF}`; // TAB indent, spray polygon
            }

            // Add coordinates with TAB indent
            for (let i = 0; i < coordinates.length; i++) {
                const coord = coordinates[i];
                jobFileContent += `\t${utils.fixedTo(coord[1], 6)} ${utils.fixedTo(coord[0], 6)}${CRLF}`; // TAB indent, lat lon
            }
        }
    }

    // Process inclusive areas first, then exclusive areas
    if (!utils.isEmptyArray(sprayAreas)) {
        processAreas(sprayAreas, 'INC');
    }

    if (!utils.isEmptyArray(excludedAreas)) {
        processAreas(excludedAreas, 'EXC');
    }

    return jobFileContent;
}
|
|
|
|
/**
 * Generate internal job name based on SatLoc system type
 * @param {string} systemType - System type (use SystemTypes constants); currently
 *   unused — every supported system gets the same plain-number name — but kept
 *   so system-specific naming can be added without changing callers
 * @param {number} jobNumber - Job number
 * @returns {string} Internal job name
 */
function generateInternalJobName(systemType, jobNumber) {
    // All system types currently share the same naming scheme: the job number
    // rendered as a string.
    return `${jobNumber}`;
}
|
|
|
|
/**
 * Extract job number from name string
 * @param {string} name - Job name
 * @returns {number|null} Job number in the valid G4 range (1-9999), or null
 */
function extractJobNumber(name) {
    if (!name) return null;

    // Recognized name shapes, tried in order; first hit wins.
    const patterns = [
        /^(\d+)$/,          // Pure number
        /^Job_?(\d+)/i,     // Job123, Job_123
        /(\d+)\.job$/i,     // 123.job
        /_(\d+)$/           // ending with _123
    ];

    for (const pattern of patterns) {
        const hit = pattern.exec(name);
        if (hit === null) continue;

        const value = parseInt(hit[1], 10);
        // Ensure it's within valid range (1-9999 for G4); an out-of-range hit
        // yields null rather than falling through to later patterns.
        return (value >= 1 && value <= 9999) ? value : null;
    }

    return null;
}
|
|
|
|
/**
 * Parse SatLoc log data from partner system.
 * This one was made for parsing exported csv file after a log file loaded into MapStar
 * Expected row format (comma-separated):
 *   timestamp,latitude,longitude,altitude,speed,applicationRate,targetRate,sprayOn,temperature,humidity
 * Rows missing a parsable latitude/longitude are skipped.
 * @param {*} logBuffer The csv file content buffer
 * @returns {Promise<Object>} result object with structure:
 * {
 *   records: [],
 *   startTime: null,
 *   endTime: null,
 *   boundingBox: null,
 *   fileSize: logBuffer.length
 * }
 */
// IMPROVED: async function instead of wrapping synchronous work in
// `new Promise(...)` (explicit-construction anti-pattern); interface is
// unchanged — callers still receive a Promise.
async function parseSatLocCSVData(logBuffer) {
    try {
        const lines = logBuffer.toString('utf8').split('\n');

        const result = {
            records: [],
            startTime: null,
            endTime: null,
            boundingBox: null,
            fileSize: logBuffer.length
        };

        let minLat = Infinity;
        let maxLat = -Infinity;
        let minLon = Infinity;
        let maxLon = -Infinity;

        // Optional numeric columns come back as null when empty or missing.
        const toFloat = (v) => (v ? parseFloat(v) : null);

        for (let i = 0; i < lines.length; i++) {
            const line = lines[i].trim();
            if (!line) continue;

            const parts = line.split(',');
            if (parts.length < 6) continue; // need at least through applicationRate

            try {
                const record = {
                    // NOTE(review): assumes ISO-like timestamp strings; Date
                    // parsing of non-ISO formats is engine-dependent — confirm
                    // against actual MapStar exports.
                    timestamp: new Date(parts[0]),
                    latitude: parseFloat(parts[1]),
                    longitude: parseFloat(parts[2]),
                    altitude: toFloat(parts[3]),
                    speed: toFloat(parts[4]),
                    applicationRate: toFloat(parts[5]),
                    targetRate: toFloat(parts[6]),
                    sprayOn: parts[7] ? parts[7].toLowerCase() === 'true' || parts[7] === '1' : false,
                    temperature: toFloat(parts[8]),
                    humidity: toFloat(parts[9])
                };

                // Validate required fields (Number.isNaN avoids the coercing
                // global isNaN).
                if (Number.isNaN(record.latitude) || Number.isNaN(record.longitude)) {
                    continue;
                }

                // Update time range
                if (!result.startTime || record.timestamp < result.startTime) {
                    result.startTime = record.timestamp;
                }
                if (!result.endTime || record.timestamp > result.endTime) {
                    result.endTime = record.timestamp;
                }

                // Update bounding box
                minLat = Math.min(minLat, record.latitude);
                maxLat = Math.max(maxLat, record.latitude);
                minLon = Math.min(minLon, record.longitude);
                maxLon = Math.max(maxLon, record.longitude);

                result.records.push(record);

            } catch (parseError) {
                // Best-effort per-row parsing: log and move on to the next row.
                debug(`Error parsing line ${i + 1}: ${parseError.message}`);
            }
        }

        // Set bounding box if we have valid records
        if (result.records.length > 0) {
            result.boundingBox = {
                minLat,
                maxLat,
                minLon,
                maxLon
            };
        }

        debug(`Parsed ${result.records.length} records from SatLoc log`);
        return result;

    } catch (error) {
        debug('Error parsing SatLoc log data:', error);
        throw error; // preserve rejection semantics of the original Promise wrapper
    }
}
|
|
|
|
module.exports = {
|
|
readSatLogJob,
|
|
parseLogData: parseSatLocCSVData,
|
|
createSatLocJob,
|
|
extractJobNumber,
|
|
generateInternalJobName
|
|
}
|