2503 lines
86 KiB
JavaScript
2503 lines
86 KiB
JavaScript
/**
|
||
* SatLoc Binary Log Parser
|
||
* High-performance async parser for SatLoc/Transland V.2 Log Files (Format Version 3.76)
|
||
*
|
||
* Parses binary log files according to LOGFileFormat_Air_3_76.md specification
|
||
* Maps data to AgMission ApplicationDetail and WorkRecord structures
|
||
*
|
||
* Enhanced with Application Processor integration for proper log grouping and file management
|
||
*/
|
||
|
||
const fs = require('fs').promises;
|
||
const moment = require('moment');
|
||
const path = require('path');
|
||
const logger = require('./logger');
|
||
const ApplicationDetail = require('../model/application_detail');
|
||
const { fixedTo } = require('../helpers/utils');
|
||
const { extractJobIdFromFileName } = require('./satloc_util');
|
||
const { FCTypes } = require('./constants');
|
||
|
||
// Record types from LOGFileFormat_Air_3_76.md specification
// Each key carries its numeric type as a suffix (e.g. FLOW_MONITOR_30 -> 30)
// so code reads unambiguously against the binary format document.
const RECORD_TYPES = {
  // Specific numeric record types from binary format
  POSITION_1: 1, // Both Short (43 bytes) and Enhanced (78 bytes) use type 1
  GPS_10: 10,
  GPS_STATUS_EXTENDED_11: 11, // Not used at this time May/2020
  SWATH_NUMBER_20: 20,
  FLOW_MONITOR_30: 30,
  DUAL_FLOW_MONITOR_31: 31, // Deprecated
  TARGET_APPLICATION_RATES_32: 32,
  DUAL_FLOW_TARGET_RATES_33: 33,
  APPLIED_RATES_36: 36,
  FIRE_DRY_GATE_STATUS_37: 37,
  IF2_DRY_GATE_38: 38,
  TLEG_DRY_GATE_39: 39,
  LASER_ALTIMETER_42: 42,
  AGDISP_DATA_43: 43,
  TACH_TIMES_45: 45,
  CONTROLLER_TYPE_BY_NAME_46: 46,
  IF2_LIQUID_BOOM_PRESSURE_47: 47,
  WIND_50: 50,
  MICRO_RPM_52: 52,
  SBC_TEMPS_56: 56,
  METERATE_57: 57,
  MARKER_ASCII_60: 60,
  MARKER_UNICODE_61: 61,
  SYSTEM_SETUP_100: 100,
  ENVIRONMENTAL_110: 110,
  SWATHING_SETUP_120: 120,
  FLOW_SETUP_140: 140,
  BOOM_SECTIONS_142: 142,
  JOB_INFO_STRING_151: 151,
  JOB_INFO_NAME_STRING_152: 152
};
|
||
|
||
// Record type name resolution for debugging
// Maps the numeric record type back to a human-readable name; must stay
// in sync with RECORD_TYPES above (keys there are NAME_<number>).
const RECORD_TYPE_NAMES = {
  1: 'POSITION',
  10: 'GPS',
  11: 'GPS_STATUS_EXTENDED',
  20: 'SWATH_NUMBER',
  30: 'FLOW_MONITOR',
  31: 'DUAL_FLOW_MONITOR',
  32: 'TARGET_APPLICATION_RATES',
  33: 'DUAL_FLOW_TARGET_RATES',
  36: 'APPLIED_RATES',
  37: 'FIRE_DRY_GATE_STATUS',
  38: 'IF2_DRY_GATE',
  39: 'TLEG_DRY_GATE',
  42: 'LASER_ALTIMETER',
  43: 'AGDISP_DATA',
  45: 'TACH_TIMES',
  46: 'CONTROLLER_TYPE_BY_NAME',
  47: 'IF2_LIQUID_BOOM_PRESSURE',
  50: 'WIND',
  52: 'MICRO_RPM',
  56: 'SBC_TEMPS',
  57: 'METERATE',
  60: 'MARKER_ASCII',
  61: 'MARKER_UNICODE',
  100: 'SYSTEM_SETUP',
  110: 'ENVIRONMENTAL',
  120: 'SWATHING_SETUP',
  140: 'FLOW_SETUP',
  142: 'BOOM_SECTIONS',
  151: 'JOB_INFO_STRING',
  152: 'JOB_INFO_NAME_STRING'
};
|
||
|
||
// Record start flag from specification
// Every binary record begins with this sync byte (0xA5); readHeaderFromBuffer()
// also accepts it as an alternate header terminator.
const RECORD_START_FLAG = 0xA5;
|
||
|
||
class SatLocLogParser {
|
||
/**
 * Create a parser instance.
 *
 * @param {Object} [options] - Parser tuning options. Unknown keys are passed
 *   through verbatim into this.options (see NOTE on the spread below).
 */
constructor(options = {}) {
  this.options = {
    batchSize: options.batchSize || 1000,
    skipUnknownRecords: options.skipUnknownRecords !== false, // default true; pass false to keep raw unknown records
    validateChecksums: options.validateChecksums !== false, // default true; XOR checksum per record
    debugRecordTypes: options.debugRecordTypes || [], // Array of record types to debug with full details ('ALL' debugs everything)
    verbose: options.verbose || false, // Enable verbose logging
    maxPositionsPerJob: options.maxPositionsPerJob, // Only limit if explicitly set (no default)
    trackSequence: options.trackSequence || false, // Only track sequence for debugging
    // NOTE(review): spreading the raw options last lets caller-supplied values
    // override the normalized defaults computed above (e.g. a non-boolean
    // validateChecksums survives un-normalized) — confirm this is intentional.
    ...options
  };

  // Initialize Pino logger
  this.logger = logger.child('satloc_parser');

  // Parsing counters; returned to callers via parseFile() results.
  this.statistics = {
    totalRecords: 0,
    validRecords: 0,
    invalidRecords: 0,
    recordTypes: {}, // per-type counts keyed by numeric record type
    parseErrors: 0,
    positionsSkipped: 0 // Track positions skipped due to limits
  };

  // Track actual sequence for pattern analysis
  this.recordSequence = [];
}
|
||
|
||
/**
|
||
* Get record type name for debugging
|
||
*/
|
||
getRecordTypeName(recordType) {
|
||
return RECORD_TYPE_NAMES[recordType] || `UNKNOWN_${recordType}`;
|
||
}
|
||
|
||
/**
|
||
* Check if record type should be debugged with full details
|
||
*/
|
||
shouldDebugRecord(recordType) {
|
||
return this.options.debugRecordTypes.includes(recordType) ||
|
||
this.options.debugRecordTypes.includes('ALL');
|
||
}
|
||
|
||
/**
|
||
* Format timestamp object as a single line string
|
||
*/
|
||
formatTimestamp(timestamp) {
|
||
if (!timestamp || typeof timestamp !== 'object') return timestamp;
|
||
if (timestamp.year && timestamp.month && timestamp.day) {
|
||
return `${timestamp.year}-${String(timestamp.month).padStart(2, '0')}-${String(timestamp.day).padStart(2, '0')} ${String(timestamp.hour).padStart(2, '0')}:${String(timestamp.minute).padStart(2, '0')}:${String(timestamp.seconds).padStart(2, '0')}.${String(timestamp.milliseconds).padStart(3, '0')}`;
|
||
}
|
||
return timestamp;
|
||
}
|
||
|
||
/**
|
||
* Format data object for logging, converting timestamps to single line
|
||
*/
|
||
formatDataForLogging(data) {
|
||
if (!data || typeof data !== 'object') return data;
|
||
|
||
const formatted = { ...data };
|
||
if (formatted.timestamp) {
|
||
formatted.timestamp = this.formatTimestamp(formatted.timestamp);
|
||
}
|
||
return formatted;
|
||
}
|
||
|
||
/**
|
||
* Log debug information with record type name
|
||
*/
|
||
debugRecord(recordType, message, data = null) {
|
||
const recordName = this.getRecordTypeName(recordType);
|
||
if (this.options.verbose || this.shouldDebugRecord(recordType)) {
|
||
if (data) {
|
||
const formattedData = this.formatDataForLogging(data);
|
||
this.logger.debug({
|
||
module: 'satloc_parser',
|
||
recordType,
|
||
recordName,
|
||
message,
|
||
data: formattedData
|
||
}, `[${recordName}_${recordType}] ${message}`);
|
||
} else {
|
||
this.logger.debug({
|
||
module: 'satloc_parser',
|
||
recordType,
|
||
recordName,
|
||
message
|
||
}, `[${recordName}_${recordType}] ${message}`);
|
||
}
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Parse a SatLoc binary log file
|
||
* @param {string} filePath - Path to the .log file
|
||
* @param {Object} fileContext - Context information (fileId, jobId, etc.)
|
||
* @returns {Promise<Object>} Parse results with statistics and data
|
||
*/
|
||
async parseFile(filePath, fileContext = {}) {
|
||
|
||
try {
|
||
// Extract job ID from filename using utility
|
||
const fileName = path.basename(filePath);
|
||
const filenameJobId = extractJobIdFromFileName(fileName);
|
||
|
||
// Merge filename job ID into file context
|
||
const enhancedFileContext = {
|
||
...fileContext,
|
||
fileName,
|
||
filenameJobId
|
||
};
|
||
|
||
// Read file directly as binary buffer
|
||
const binaryBuffer = await fs.readFile(filePath);
|
||
|
||
// Parse header from buffer
|
||
const headerInfo = await this.readHeaderFromBuffer(binaryBuffer);
|
||
|
||
// Parse binary records from buffer
|
||
const parseResults = await this.parseRecordsFromBuffer(binaryBuffer, headerInfo, enhancedFileContext);
|
||
|
||
return {
|
||
success: true,
|
||
headerInfo,
|
||
fileName,
|
||
filenameJobId,
|
||
statistics: this.statistics,
|
||
...parseResults
|
||
};
|
||
|
||
} catch (error) {
|
||
this.logger.error({ error: error.message, filePath }, `Parse error: ${error.message}`);
|
||
return {
|
||
success: false,
|
||
error: error.message,
|
||
statistics: this.statistics
|
||
};
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Read and validate file header from buffer (ASCII "AS" + version string)
|
||
* Note: Some SatLoc formats don't use null terminators - they use the 0xA5 record start flag
|
||
* to mark the end of the header. We check for both null byte and 0xA5 record start.
|
||
*/
|
||
async readHeaderFromBuffer(buffer) {
|
||
try {
|
||
if (buffer.length < 3) {
|
||
throw new Error('Buffer too short for valid header');
|
||
}
|
||
|
||
// Check for ASCII "AS"
|
||
if (buffer[0] !== 0x41 || buffer[1] !== 0x53) { // 'A', 'S'
|
||
throw new Error('Invalid file header - missing AS signature');
|
||
}
|
||
|
||
// Find version string end - check for BOTH null byte (0x00) AND record start flag (0xA5)
|
||
// Some formats use null terminator, others use 0xA5 to mark first record
|
||
let versionEnd = 2;
|
||
while (versionEnd < buffer.length && buffer[versionEnd] !== 0 && buffer[versionEnd] !== RECORD_START_FLAG) {
|
||
versionEnd++;
|
||
}
|
||
|
||
if (versionEnd >= buffer.length) {
|
||
throw new Error('Invalid file header - no terminator found (null byte or 0xA5 record start)');
|
||
}
|
||
|
||
// Extract version, trimming any trailing spaces
|
||
const version = buffer.slice(2, versionEnd).toString('ascii').trim();
|
||
|
||
// Header length is up to (but not including) the terminator
|
||
// If terminator is 0xA5 (record start), headerLength is versionEnd (records start there)
|
||
// If terminator is 0x00 (null byte), headerLength is versionEnd + 1 (skip null)
|
||
const headerLength = buffer[versionEnd] === RECORD_START_FLAG ? versionEnd : versionEnd + 1;
|
||
|
||
this.logger.debug({
|
||
version,
|
||
headerLength,
|
||
terminatorType: buffer[versionEnd] === RECORD_START_FLAG ? '0xA5 (record start)' : '0x00 (null byte)',
|
||
terminatorPosition: versionEnd
|
||
}, `Parsed header: version="${version}", headerLength=${headerLength}`);
|
||
|
||
return {
|
||
version,
|
||
headerLength
|
||
};
|
||
|
||
} catch (error) {
|
||
this.logger.error({ error: error.message }, `Header read error: ${error.message}`);
|
||
throw error;
|
||
}
|
||
}
|
||
|
||
/**
|
||
* Extract null-terminated string from buffer, handling padding characters
|
||
*/
|
||
extractNullTerminatedString(buffer) {
|
||
if (!buffer || buffer.length === 0) return '';
|
||
|
||
// Find first null byte
|
||
const nullIndex = buffer.indexOf(0);
|
||
|
||
if (nullIndex === -1) {
|
||
// No null byte found, return entire buffer as string
|
||
return buffer.toString('ascii').trim();
|
||
}
|
||
|
||
// Extract string up to null byte
|
||
return buffer.subarray(0, nullIndex).toString('ascii').trim();
|
||
}
|
||
|
||
/**
 * Read and validate file header (ASCII "AS" + version + null byte) directly
 * from disk via a file handle.
 *
 * NOTE(review): parseFile() uses readHeaderFromBuffer() instead; this
 * handle-based variant looks like an alternate/legacy path — confirm callers.
 * Unlike the buffer variant it requires a 0x00 terminator (no 0xA5 fallback)
 * and only scans the first 32 bytes, so a version string longer than ~30
 * characters would be rejected.
 *
 * @param {string} filePath - Path to the .log file
 * @returns {Promise<{version: string, dataStartOffset: number, fileSize: number}>}
 * @throws {Error} When the file is too short, lacks the "AS" signature,
 *   or no null terminator appears within the bytes read
 */
async readFileHeader(filePath) {
  const handle = await fs.open(filePath, 'r');
  try {
    // Read first 32 bytes to find header
    const buffer = Buffer.allocUnsafe(32);
    const { bytesRead } = await handle.read(buffer, 0, 32, 0);

    if (bytesRead < 3) {
      throw new Error('File too short for valid header');
    }

    // Check for ASCII "AS"
    if (buffer[0] !== 0x41 || buffer[1] !== 0x53) { // 'A', 'S'
      throw new Error('Invalid file header - missing "AS" signature');
    }

    // Find null terminator for version
    let versionEnd = 2;
    while (versionEnd < bytesRead && buffer[versionEnd] !== 0) {
      versionEnd++;
    }

    if (versionEnd >= bytesRead) {
      throw new Error('Invalid file header - no null terminator found');
    }

    const version = buffer.slice(2, versionEnd).toString('ascii');
    const dataStartOffset = versionEnd + 1; // first byte after the null terminator

    return {
      version,
      dataStartOffset,
      fileSize: (await handle.stat()).size
    };

  } finally {
    // Always release the handle, even when header validation throws.
    await handle.close();
  }
}
|
||
|
||
/**
|
||
* Parse binary records from the binary buffer
|
||
* Record format: 0xA5 (start flag) + Length + Type + Checksum + Data
|
||
* Total record length = Length field (includes 4 header bytes)
|
||
* Checksum = XOR of all bytes from start flag to end of data (inclusive)
|
||
*
|
||
* @param {*} buffer the binary buffer
|
||
* @param {*} headerInfo parsed file header
|
||
* @param {*} fileContext the context for the file being processed
|
||
* @returns {Promise<Object>} the parsed records
|
||
*/
|
||
async parseRecordsFromBuffer(buffer, headerInfo, fileContext) {
|
||
// MEMORY OPTIMIZATION: Don't accumulate all records - only keep essential metadata
|
||
// const records = []; // REMOVED - causes memory leak
|
||
let recordCount = 0; // Track count instead
|
||
|
||
let currentGPS = null; // GPS (10)
|
||
let currentFlow = null; // Flow Monitor (130)
|
||
let currentFlowSetup = null; // Flow Setup (140)
|
||
let currentWind = null; // Wind (50)
|
||
let currentSwath = null; // Swath Number (20)
|
||
let currentEnvironmental = null; // Environmental (70)
|
||
let currentLaser = null; // Laser (80)
|
||
let currentAppliedRate = null; // Applied Rate (36)
|
||
let currentTargetRate = null; // Target Rate (32)
|
||
let currentPressure = null; // Boom Pressure (47)
|
||
|
||
let currentGPSExtent = null; // GPS 11, N/A yet since 2020
|
||
let currentSwathing = null; // Swathing (120)
|
||
let currentControllerType = null; // Controller Type (130)
|
||
let currentTach = null; // Tach Times (45)
|
||
let currentAgdisp = null; // AgDisp Data (43)
|
||
let currentSystemSetup = null; // System Setup (100)
|
||
|
||
// Bounding box calculation [minX, minY, maxX, maxY] (like geo_util.updateAreasBBoxLL)
|
||
let boundingBox = [Number.MAX_VALUE, Number.MAX_VALUE, (-1 * Number.MAX_VALUE), (-1 * Number.MAX_VALUE)];
|
||
let utmZone = null;
|
||
|
||
// Job detection and grouping variables
|
||
const jobGroups = {};
|
||
let detectedJobIds = {
|
||
jobLongLabelName: null, // From SWATHING_SETUP_120
|
||
satlocJobId: null, // From JOB_INFO_STRING_151 or JOB_INFO_NAME_STRING_152
|
||
filenameJobId: fileContext.filenameJobId || null
|
||
};
|
||
let currentJobId = null; // Current effective job ID for grouping
|
||
|
||
// Metadata extraction variables (moved from application processor)
|
||
const metadata = {
|
||
jobId: null,
|
||
satlocJobId: null, // Will be set at end of parsing with priority: filename -> jobLongLabelName -> satlocJobId (151/152)
|
||
aircraftId: null,
|
||
pilotName: null,
|
||
fcType: null, // Flow Controller Type: Liquid, Dry
|
||
fcName: null, // Flow Controller Name
|
||
};
|
||
|
||
let position = headerInfo.headerLength || 0; // Start after header
|
||
const bufferSize = buffer.length;
|
||
|
||
this.logger.debug({ position, bufferSize }, `Starting record parsing from position ${position}, buffer size: ${bufferSize}`);
|
||
this.logger.debug({ firstBytes: buffer.slice(position, position + 20).toString('hex') }, `First 20 bytes after header`);
|
||
|
||
while (position < bufferSize - 4) { // Need at least 4 bytes for record header
|
||
// Look for record start flag (0xA5)
|
||
if (buffer[position] !== RECORD_START_FLAG) {
|
||
position++;
|
||
continue;
|
||
}
|
||
|
||
if (this.options.verbose) {
|
||
this.logger.debug({ position }, `Found potential record start at position ${position}`);
|
||
}
|
||
|
||
// Record structure: Start Flag (1) + Length (1) + Type (1) + Checksum (1) + Data (Length-4)
|
||
const recordLength = buffer[position + 1];
|
||
const recordType = buffer[position + 2];
|
||
const recordChecksum = buffer[position + 3];
|
||
|
||
if (this.options.verbose) {
|
||
this.logger.debug({ recordLength, recordType, recordChecksum }, `Record: length=${recordLength}, type=${recordType}, checksum=${recordChecksum}`);
|
||
}
|
||
|
||
// Validate record length (minimum 4 for header, maximum 255)
|
||
if (recordLength < 4 || recordLength > 255) {
|
||
if (this.options.verbose) {
|
||
this.logger.debug({ recordLength }, `Invalid record length: ${recordLength}`);
|
||
}
|
||
position++;
|
||
continue;
|
||
}
|
||
|
||
// Check if we have complete record in buffer
|
||
if (position + recordLength > bufferSize) {
|
||
this.logger.debug({ position, recordLength, bufferSize }, `Incomplete record at end of buffer: position ${position}, length ${recordLength}, buffer size ${bufferSize}`);
|
||
break;
|
||
}
|
||
|
||
// Validate checksum if enabled (XOR of all bytes from start flag to end of data)
|
||
if (this.options.validateChecksums) {
|
||
const calculatedChecksum = this.calculateChecksum(buffer, position, recordLength);
|
||
if (calculatedChecksum !== recordChecksum) {
|
||
this.logger.debug({
|
||
recordType,
|
||
position,
|
||
expectedChecksum: recordChecksum,
|
||
calculatedChecksum
|
||
}, `Checksum mismatch for record type ${recordType} at position ${position}: expected ${recordChecksum}, calculated ${calculatedChecksum}`);
|
||
this.statistics.invalidRecords++;
|
||
position++;
|
||
continue;
|
||
}
|
||
}
|
||
|
||
// Extract record data (everything after the 4-byte header)
|
||
const dataLength = recordLength - 4;
|
||
const recordData = buffer.slice(position + 4, position + 4 + dataLength);
|
||
|
||
// Parse record based on type first to get enhanced info
|
||
let parsedRecord;
|
||
try {
|
||
parsedRecord = this.parseRecord(recordType, recordData, {
|
||
currentGPS,
|
||
currentGPSExtent,
|
||
currentFlow,
|
||
currentFlowSetup,
|
||
currentWind,
|
||
currentSwath,
|
||
currentSwathing,
|
||
currentEnvironmental,
|
||
currentLaser,
|
||
currentAppliedRate,
|
||
currentTargetRate,
|
||
currentControllerType,
|
||
currentTach,
|
||
currentAgdisp,
|
||
currentSystemSetup,
|
||
currentPressure
|
||
});
|
||
|
||
if (parsedRecord) {
|
||
this.statistics.validRecords++;
|
||
this.statistics.recordTypes[recordType] = (this.statistics.recordTypes[recordType] || 0) + 1;
|
||
|
||
// Update context based on record type
|
||
if (parsedRecord.recordType === RECORD_TYPES.POSITION_1) {
|
||
// Create application detail record with accumulated context
|
||
if (parsedRecord.lat && parsedRecord.lon) {
|
||
const context = {
|
||
currentGPS,
|
||
currentGPSExtent,
|
||
currentFlow,
|
||
currentFlowSetup,
|
||
currentWind,
|
||
currentSwath,
|
||
currentSwathing,
|
||
currentEnvironmental,
|
||
currentLaser,
|
||
currentAppliedRate,
|
||
currentTargetRate,
|
||
currentControllerType,
|
||
currentTach,
|
||
currentAgdisp,
|
||
currentSystemSetup,
|
||
currentPressure
|
||
};
|
||
const appDetail = this.createApplicationDetail(parsedRecord, fileContext, context);
|
||
|
||
// Update bounding box calculation incrementally
|
||
if (appDetail.lat !== null && appDetail.lon !== null) {
|
||
if (appDetail.lon < boundingBox[0]) boundingBox[0] = appDetail.lon; // minX (lon)
|
||
if (appDetail.lat < boundingBox[1]) boundingBox[1] = appDetail.lat; // minY (lat)
|
||
if (appDetail.lon > boundingBox[2]) boundingBox[2] = appDetail.lon; // maxX (lon)
|
||
if (appDetail.lat > boundingBox[3]) boundingBox[3] = appDetail.lat; // maxY (lat)
|
||
}
|
||
|
||
// Determine current effective job ID with priority: filename -> jobLongLabelName -> satlocJobId -> 'unknown'
|
||
currentJobId = detectedJobIds.filenameJobId && detectedJobIds.filenameJobId !== 'null' && detectedJobIds.filenameJobId.trim() !== ''
|
||
? detectedJobIds.filenameJobId
|
||
: (detectedJobIds.jobLongLabelName || detectedJobIds.satlocJobId || 'unknown');
|
||
|
||
// Group application detail by job ID with safety limit
|
||
if (!jobGroups[currentJobId]) {
|
||
jobGroups[currentJobId] = [];
|
||
}
|
||
|
||
// MEMORY OPTIMIZATION: Limit positions per job to prevent OOM (only if maxPositionsPerJob is set)
|
||
if (this.options.maxPositionsPerJob !== undefined && jobGroups[currentJobId].length >= this.options.maxPositionsPerJob) {
|
||
// Skip this position but increment counter for logging
|
||
this.statistics.positionsSkipped++;
|
||
if (this.statistics.positionsSkipped === 1) {
|
||
this.logger.warn(`Job ${currentJobId} exceeded max positions limit (${this.options.maxPositionsPerJob}), skipping additional positions`);
|
||
}
|
||
} else {
|
||
jobGroups[currentJobId].push(appDetail);
|
||
}
|
||
}
|
||
} else if (parsedRecord.recordType === RECORD_TYPES.GPS_10 || parsedRecord.recordType === RECORD_TYPES.GPS_STATUS_EXTENDED_11) {
|
||
currentGPS = parsedRecord;
|
||
currentGPSExtent = parsedRecord; // N/A 2020, Is it available now?
|
||
} else if (parsedRecord.recordType === RECORD_TYPES.SWATH_NUMBER_20) {
|
||
currentSwath = parsedRecord;
|
||
} else if (parsedRecord.recordType === RECORD_TYPES.FLOW_MONITOR_30) {
|
||
currentFlow = parsedRecord;
|
||
} else if (parsedRecord.recordType === RECORD_TYPES.TARGET_APPLICATION_RATES_32) {
|
||
currentTargetRate = parsedRecord;
|
||
} else if (parsedRecord.recordType === RECORD_TYPES.APPLIED_RATES_36) {
|
||
currentAppliedRate = parsedRecord;
|
||
} else if (parsedRecord.recordType === RECORD_TYPES.LASER_ALTIMETER_42) {
|
||
currentLaser = parsedRecord;
|
||
} else if (parsedRecord.recordType === RECORD_TYPES.AGDISP_DATA_43) {
|
||
currentAgdisp = parsedRecord;
|
||
} else if (parsedRecord.recordType === RECORD_TYPES.TACH_TIMES_45) {
|
||
currentTach = parsedRecord;
|
||
} else if (parsedRecord.recordType === RECORD_TYPES.CONTROLLER_TYPE_BY_NAME_46) {
|
||
currentControllerType = parsedRecord;
|
||
|
||
// Metadata extraction
|
||
if (parsedRecord.controllerType && !metadata.fcName) {
|
||
metadata.fcName = parsedRecord.controllerType;
|
||
}
|
||
} else if (parsedRecord.recordType === RECORD_TYPES.WIND_50) {
|
||
currentWind = parsedRecord;
|
||
} else if (parsedRecord.recordType === RECORD_TYPES.SYSTEM_SETUP_100) {
|
||
currentSystemSetup = parsedRecord;
|
||
|
||
// Metadata extraction
|
||
if (parsedRecord.aircraftId && !metadata.aircraftId) {
|
||
metadata.aircraftId = parsedRecord.aircraftId;
|
||
}
|
||
if (parsedRecord.pilotName && !metadata.pilotName) {
|
||
metadata.pilotName = parsedRecord.pilotName;
|
||
}
|
||
} else if (parsedRecord.recordType === RECORD_TYPES.ENVIRONMENTAL_110) {
|
||
currentEnvironmental = parsedRecord;
|
||
} else if (parsedRecord.recordType === RECORD_TYPES.SWATHING_SETUP_120) {
|
||
// Note: JobId,JobLongLabelName could be used later for matching if Satloc can assure the matching jobName later
|
||
currentSwathing = parsedRecord;
|
||
|
||
// Job ID detection: Extract jobLongLabelName, fallback, for job grouping
|
||
if (parsedRecord.jobLongLabelName) {
|
||
detectedJobIds.jobLongLabelName = parsedRecord.jobLongLabelName;
|
||
this.logger.debug(`Detected job ID from SWATHING_SETUP_120: ${parsedRecord.jobLongLabelName}`);
|
||
}
|
||
|
||
// Metadata extraction
|
||
if (parsedRecord.jobId && !metadata.jobId) {
|
||
metadata.jobId = parsedRecord.jobId;
|
||
}
|
||
|
||
} else if (parsedRecord.recordType === RECORD_TYPES.FLOW_SETUP_140) {
|
||
currentFlowSetup = parsedRecord;
|
||
metadata.fcType = parsedRecord.flowControlStatus.dry ? FCTypes.DRY : FCTypes.LIQUID;
|
||
// Flow Setup (140) is used as fallback for target rate info in getFlowRates()
|
||
} else if (parsedRecord.recordType === RECORD_TYPES.JOB_INFO_STRING_151) {
|
||
// JOB_INFO_STRING_151 processed - extract job info
|
||
if (parsedRecord.jobInfo) {
|
||
detectedJobIds.satlocJobId = parsedRecord.jobInfo;
|
||
this.logger.debug(`Detected job ID from JOB_INFO_STRING_151: ${parsedRecord.jobInfo}`);
|
||
}
|
||
} else if (parsedRecord.recordType === RECORD_TYPES.JOB_INFO_NAME_STRING_152) {
|
||
// JOB_INFO_NAME_STRING_152 processed - extract job name (jobFileName field from parser)
|
||
if (parsedRecord.jobFileName) {
|
||
detectedJobIds.satlocJobId = parsedRecord.jobFileName;
|
||
this.logger.debug(`Detected job ID from JOB_INFO_NAME_STRING_152: ${parsedRecord.jobFileName}`);
|
||
}
|
||
}
|
||
|
||
// Track this record in the actual sequence (after parsing to get enhanced info)
|
||
this.recordSequence.push({
|
||
recordType,
|
||
position: this.statistics.totalRecords,
|
||
bytePosition: position,
|
||
length: recordLength,
|
||
isEnhanced: parsedRecord.isEnhanced || false,
|
||
recordSubtype: parsedRecord.recordSubtype || null
|
||
});
|
||
|
||
// MEMORY OPTIMIZATION: Don't push to records array - causes memory leak
|
||
// records.push(parsedRecord); // REMOVED
|
||
recordCount++; // Just increment counter
|
||
}
|
||
} catch (parseError) {
|
||
this.logger.error({ recordType, error: parseError.message }, `Error parsing record type ${recordType}: ${parseError.message}`);
|
||
this.statistics.parseErrors++;
|
||
}
|
||
|
||
this.statistics.totalRecords++;
|
||
position += recordLength; // Move to next record
|
||
}
|
||
|
||
this.logger.info({ recordCount }, `Completed parsing: found ${recordCount} records`);
|
||
|
||
// Calculate UTM zone from bounding box if we have valid coordinates
|
||
if (boundingBox[0] !== Number.MAX_VALUE) {
|
||
const geoUtil = require('./geo_util');
|
||
utmZone = geoUtil.calcRefZonebyBbox(boundingBox);
|
||
this.logger.debug(`Calculated UTM zone: ${utmZone.zone}${utmZone.hemisphere} from bbox [${boundingBox.join(', ')}]`);
|
||
}
|
||
|
||
// Finalize metadata with satlocJobId from jobGroups keys
|
||
// Extract all job IDs from jobGroups and join as comma-separated string
|
||
const jobGroupKeys = Object.keys(jobGroups);
|
||
if (jobGroupKeys.length > 0) {
|
||
metadata.jobId = jobGroupKeys.join(',');
|
||
}
|
||
|
||
// Finalize satlocJobId with priority: filename -> jobLongLabelName -> satlocJobId (151/152)
|
||
metadata.satlocJobId = detectedJobIds.filenameJobId
|
||
|| detectedJobIds.jobLongLabelName
|
||
|| detectedJobIds.satlocJobId
|
||
|| null;
|
||
|
||
// MEMORY OPTIMIZATION: Log job group sizes for monitoring
|
||
const jobGroupSizes = {};
|
||
let totalPositions = 0;
|
||
for (const [jobId, details] of Object.entries(jobGroups)) {
|
||
jobGroupSizes[jobId] = details.length;
|
||
totalPositions += details.length;
|
||
}
|
||
this.logger.info({
|
||
jobGroupSizes,
|
||
totalPositions,
|
||
positionsSkipped: this.statistics.positionsSkipped,
|
||
maxPositionsPerJob: this.options.maxPositionsPerJob
|
||
}, `Job groups: ${totalPositions} positions across ${jobGroupKeys.length} jobs`);
|
||
|
||
if (this.statistics.positionsSkipped > 0) {
|
||
this.logger.warn(`Skipped ${this.statistics.positionsSkipped} positions to prevent memory overflow`);
|
||
}
|
||
|
||
return {
|
||
// MEMORY OPTIMIZATION: Don't return full records array
|
||
// records, // REMOVED
|
||
recordCount,
|
||
// New integrated calculations
|
||
boundingBox: boundingBox[0] !== Number.MAX_VALUE ? boundingBox : null,
|
||
utmZone: utmZone ? {
|
||
zoneNumber: utmZone.zone,
|
||
hemisphere: utmZone.hemisphere,
|
||
// Legacy format for backward compatibility
|
||
toString: () => `${utmZone.zone}${utmZone.hemisphere}`
|
||
} : null,
|
||
jobGroups,
|
||
detectedJobIds,
|
||
metadata
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Calculate XOR checksum for record validation
|
||
* Checksum = XOR of all bytes from Record Start Flag to end of data (inclusive)
|
||
* This excludes the checksum byte itself
|
||
*/
|
||
calculateChecksum(buffer, startPos, length) {
|
||
let checksum = 0;
|
||
// XOR all bytes from start flag to end of data, excluding the checksum byte at position startPos + 3
|
||
for (let i = startPos; i < startPos + length; i++) {
|
||
if (i !== startPos + 3) { // Skip the checksum byte itself
|
||
checksum ^= buffer[i];
|
||
}
|
||
}
|
||
return checksum;
|
||
}
|
||
|
||
/**
 * Parse an individual record's data payload based on its numeric type.
 * Pure dispatch to the per-type parse* methods; unknown types yield null
 * (or a raw-data stub when options.skipUnknownRecords is false).
 *
 * @param {number} recordType - Numeric record type from the 4-byte header
 * @param {Buffer} data - Record payload (header bytes already stripped)
 * @param {Object} context - Rolling "latest seen" records from the caller
 * @returns {Object|null} Parsed record object, or null if unhandled/invalid
 */
parseRecord(recordType, data, context) {
  let result = null;

  switch (recordType) {
    case RECORD_TYPES.POSITION_1:
      result = this.parsePosition_1(data, context);
      break;
    case RECORD_TYPES.GPS_10:
      result = this.parseGPS_10(data, context);
      break;
    case RECORD_TYPES.GPS_STATUS_EXTENDED_11:
      result = this.parseGPSStatusExtended_11(data, context);
      break;
    case RECORD_TYPES.SWATH_NUMBER_20:
      result = this.parseSwathNumber_20(data, context);
      break;
    case RECORD_TYPES.FLOW_MONITOR_30:
      result = this.parseFlowMonitor_30(data, context);
      break;
    case RECORD_TYPES.DUAL_FLOW_MONITOR_31:
      result = this.parseDualFlowMonitor_31(data, context);
      break;
    case RECORD_TYPES.TARGET_APPLICATION_RATES_32:
      result = this.parseTargetApplicationRates_32(data, context);
      break;
    case RECORD_TYPES.DUAL_FLOW_TARGET_RATES_33:
      result = this.parseDualFlowTargetRates_33(data, context);
      break;
    case RECORD_TYPES.APPLIED_RATES_36:
      result = this.parseAppliedRates_36(data, context);
      break;
    case RECORD_TYPES.FIRE_DRY_GATE_STATUS_37:
      result = this.parseFireDryGateStatus_37(data, context);
      break;
    case RECORD_TYPES.IF2_DRY_GATE_38:
      result = this.parseIF2DryGate_38(data, context);
      break;
    case RECORD_TYPES.TLEG_DRY_GATE_39:
      result = this.parseTLEGDryGate_39(data, context);
      break;
    case RECORD_TYPES.LASER_ALTIMETER_42:
      result = this.parseLaserAltimeter_42(data, context);
      break;
    case RECORD_TYPES.AGDISP_DATA_43:
      result = this.parseAgdispData_43(data, context);
      break;
    case RECORD_TYPES.TACH_TIMES_45:
      result = this.parseTachTimes_45(data, context);
      break;
    case RECORD_TYPES.CONTROLLER_TYPE_BY_NAME_46:
      result = this.parseControllerTypeByName_46(data, context);
      break;
    case RECORD_TYPES.IF2_LIQUID_BOOM_PRESSURE_47:
      result = this.parseIF2LiquidBoomPressure_47(data, context);
      // NOTE(review): this mutates the context object passed by the caller,
      // but parseRecordsFromBuffer() builds a fresh context literal per call,
      // so the assignment is discarded — confirm where currentPressure is
      // actually expected to be propagated.
      if (result) {
        context.currentPressure = {
          primaryPressure: result.if2LiqPriBoomPressure,
          dualPressure: result.if2LiqDualBoomPressure
        };
      }
      break;
    case RECORD_TYPES.WIND_50:
      result = this.parseWind_50(data, context);
      break;
    case RECORD_TYPES.MICRO_RPM_52:
      result = this.parseMicroRPM_52(data, context);
      break;
    case RECORD_TYPES.SBC_TEMPS_56:
      result = this.parseSBCTemps_56(data, context);
      break;
    case RECORD_TYPES.METERATE_57:
      result = this.parseMeterate_57(data, context);
      break;
    case RECORD_TYPES.MARKER_ASCII_60:
      result = this.parseMarkerASCII_60(data, context);
      break;
    case RECORD_TYPES.MARKER_UNICODE_61:
      result = this.parseMarkerUnicode_61(data, context);
      break;
    case RECORD_TYPES.SYSTEM_SETUP_100:
      result = this.parseSystemSetup_100(data, context);
      break;
    case RECORD_TYPES.ENVIRONMENTAL_110:
      result = this.parseEnvironmental_110(data, context);
      break;
    case RECORD_TYPES.SWATHING_SETUP_120:
      result = this.parseSwathingSetup_120(data, context);
      break;
    case RECORD_TYPES.FLOW_SETUP_140:
      result = this.parseFlowSetup_140(data, context);
      break;
    case RECORD_TYPES.BOOM_SECTIONS_142:
      result = this.parseBoomSections_142(data, context);
      break;
    case RECORD_TYPES.JOB_INFO_STRING_151:
      result = this.parseJobInfoString_151(data, context);
      break;
    case RECORD_TYPES.JOB_INFO_NAME_STRING_152:
      result = this.parseJobInfoNameString_152(data, context);
      break;
    default:
      // Unknown types are dropped unless the caller disabled skipping.
      if (!this.options.skipUnknownRecords) {
        this.debugRecord(recordType, `Unknown record type encountered`);
        result = {
          recordType: recordType,
          rawData: data
        };
      }
      break;
  }

  // Log parsed result for debugging if recordType is in debugRecordTypes or verbose is enabled
  if (result && (this.options.verbose || this.shouldDebugRecord(recordType))) {
    this.debugRecord(recordType, 'Parsed successfully', result);
  }

  return result;
}
|
||
|
||
/**
|
||
* Parse Position Record (Type 1) - handles both Short and Enhanced
|
||
* Short: 43 bytes total (39 data + 4 header)
|
||
* Enhanced: 78 bytes total (74 data + 4 header)
|
||
*/
|
||
parsePosition_1(data, context) {
|
||
if (data.length < 39) return null; // Minimum size for Position Short
|
||
|
||
let offset = 0;
|
||
const timestamp = this.parseTimestamp(data, offset);
|
||
offset += 5;
|
||
|
||
// Common fields for both Short and Enhanced
|
||
const lat = data.readDoubleLE(offset); // degrees
|
||
offset += 8;
|
||
const lon = data.readDoubleLE(offset); // degrees
|
||
offset += 8;
|
||
const altitude = data.readFloatLE(offset); // meters
|
||
offset += 4;
|
||
const speed = data.readFloatLE(offset); // m/sec
|
||
offset += 4;
|
||
const track = data.readFloatLE(offset); // degrees
|
||
offset += 4;
|
||
const xTrack = data.readFloatLE(offset); // meters
|
||
offset += 4;
|
||
const differentialAge = data.readUInt8(offset); // seconds
|
||
offset += 1;
|
||
const flags = data.readUInt8(offset); // position flags
|
||
offset += 1;
|
||
|
||
const baseRecord = {
|
||
recordType: RECORD_TYPES.POSITION_1,
|
||
timestamp,
|
||
lat,
|
||
lon,
|
||
altitude,
|
||
speed,
|
||
track,
|
||
xTrack,
|
||
differentialAge,
|
||
flags, // 0 = Spray off, 2: Spray on
|
||
};
|
||
|
||
// Check if this is Enhanced record (78 bytes total = 74 data bytes)
|
||
if (data.length >= 74) {
|
||
const recordTypeField = data.readUInt8(offset); // 1 = Enhanced, 2 = Enhanced/LPC boom on
|
||
offset += 1;
|
||
const boomControlStatus = data.readUInt8(offset);
|
||
offset += 1;
|
||
const targetFlowRateLha = data.readFloatLE(offset); // L/ha
|
||
offset += 4;
|
||
const targetFlowRateLmin = data.readFloatLE(offset); // L/min
|
||
offset += 4;
|
||
const flowRateLha = data.readFloatLE(offset); // L/ha
|
||
offset += 4;
|
||
const flowRateLmin = data.readFloatLE(offset); // L/min
|
||
offset += 4;
|
||
const valvePosition = data.readInt16LE(offset); // shaft position
|
||
offset += 2;
|
||
const statusBitFields = data.readUInt8(offset); // bit fields byte 64
|
||
offset += 1;
|
||
const primaryFlowTurbineStdev = data.readUInt8(offset); // 0-255%
|
||
offset += 1;
|
||
const dualFlowTurbineStdev = data.readUInt8(offset); // 0-255%
|
||
offset += 1;
|
||
const gpsVelNorth = data.readFloatLE(offset); // Raw GPS fVNorth
|
||
offset += 4;
|
||
const gpsVelEast = data.readFloatLE(offset); // Raw GPS fVEast
|
||
offset += 4;
|
||
const gpsVelUp = data.readFloatLE(offset); // Raw GPS fVUp
|
||
offset += 4;
|
||
|
||
return {
|
||
...baseRecord,
|
||
isEnhanced: true,
|
||
recordTypeField,
|
||
boomControlStatus,
|
||
targetFlowRateLha,
|
||
targetFlowRateLmin,
|
||
flowRateLha,
|
||
flowRateLmin,
|
||
valvePosition,
|
||
statusBitFields,
|
||
aircraftPumpOn: (statusBitFields & 0x01) ? 1 : 0,
|
||
insidePolygon: (statusBitFields & 0x02) ? 1 : 0,
|
||
constantOrVrRate: (statusBitFields & 0x04) ? 1 : 0,
|
||
autoBoomOn: (statusBitFields & 0x08) ? 1 : 0,
|
||
primaryFlowTurbineStdev,
|
||
dualFlowTurbineStdev,
|
||
gpsVelNorth,
|
||
gpsVelEast,
|
||
gpsVelUp
|
||
};
|
||
}
|
||
|
||
// Position Short record
|
||
return {
|
||
...baseRecord,
|
||
isEnhanced: false
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse additional record types (placeholders for extended functionality)
|
||
*/
|
||
parseDualFlowTargetRates_33(data, context) {
|
||
if (data.length < 6) return null; // Updated: no timestamp, minimum 6 bytes for dual flow rates
|
||
|
||
let offset = 0;
|
||
// No timestamp in this record according to spec
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.DUAL_FLOW_TARGET_RATES_33,
|
||
targetRate1: data.readUInt16LE(offset) * 0.01,
|
||
targetRate2: data.readUInt16LE(offset + 2) * 0.01,
|
||
units: data.readUInt8(offset + 4)
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse Fire/Dry Gate Status Record (Type 37)
|
||
* 30 bytes total (26 data + 4 header)
|
||
*/
|
||
parseFireDryGateStatus_37(data, context) {
|
||
if (data.length < 26) return null;
|
||
|
||
let offset = 0;
|
||
const applicationMode = data.readUInt8(offset); // Mode 1 to 7
|
||
offset += 1;
|
||
const unitsChar = String.fromCharCode(data.readUInt8(offset)); // E=English, M=Metric
|
||
offset += 1;
|
||
const appliedResolution = data.readUInt8(offset); // 0=1/16", 1=1mm, 2=1/32"
|
||
offset += 1;
|
||
const activeLevels = data.readUInt8(offset); // 1 to 7 levels
|
||
offset += 1;
|
||
const loggedTargetSpread = data.readFloatLE(offset); // Kg per min (Not used)
|
||
offset += 4;
|
||
const appliedSpreadRate = data.readFloatLE(offset); // Kg/Ha
|
||
offset += 4;
|
||
const appliedSpreadPerMin = data.readFloatLE(offset); // Kg per min (Not used)
|
||
offset += 4;
|
||
const appliedGateLevel = data.readInt16LE(offset); // Resolution Units
|
||
offset += 2;
|
||
const encoderPosition = data.readInt16LE(offset); // 1 to 2048
|
||
offset += 2;
|
||
const targetEncoderPosition = data.readInt16LE(offset); // 1 to 2048
|
||
offset += 2;
|
||
const gpsTrim = data.readInt16LE(offset); // Steps +/- 1 to n
|
||
offset += 2;
|
||
const manualTrim = data.readInt16LE(offset); // Steps +/- 1 to n
|
||
offset += 2;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.FIRE_DRY_GATE_STATUS_37,
|
||
applicationMode,
|
||
units: unitsChar,
|
||
appliedResolution,
|
||
activeLevels,
|
||
loggedTargetSpread,
|
||
appliedSpreadRate,
|
||
appliedSpreadPerMin,
|
||
appliedGateLevel,
|
||
encoderPosition,
|
||
targetEncoderPosition,
|
||
gpsTrim,
|
||
manualTrim
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse IF2 Dry Gate Record (Type 38)
|
||
* 47 bytes total (43 data + 4 header)
|
||
*/
|
||
parseIF2DryGateRecord(data, context) {
|
||
if (data.length < 43) return null;
|
||
|
||
let offset = 0;
|
||
const applicationMode = data.readUInt8(offset); // Mode 2
|
||
offset += 1;
|
||
const taskMode = data.readUInt8(offset); // 0 to 3 (Local FDG/No PMap = 3)
|
||
offset += 1;
|
||
const appliedResolution = data.readUInt8(offset); // 0=1/32", 1=1/16"
|
||
offset += 1;
|
||
const machineState = data.readUInt8(offset); // 0 to 14 at this time
|
||
offset += 1;
|
||
const switchState = data.readUInt8(offset); // bit field
|
||
offset += 1;
|
||
const gateStatus = data.readUInt8(offset); // bit field
|
||
offset += 1;
|
||
const gateSoftState = data.readUInt8(offset); // 0=Go to Gate index 0, 1=User selected
|
||
offset += 1;
|
||
const targetSpreadRate = data.readFloatLE(offset); // Kg/Ha
|
||
offset += 4;
|
||
const targetSpreadPerMin = data.readFloatLE(offset); // Kg per min (Not used)
|
||
offset += 4;
|
||
const appliedSpreadRate = data.readFloatLE(offset); // Kg/Ha
|
||
offset += 4;
|
||
const appliedSpreadPerMin = data.readFloatLE(offset); // Kg per min (Not used)
|
||
offset += 4;
|
||
const gpsTrim = data.readInt16LE(offset); // +/- GPS Trimmed Speed Up/Down
|
||
offset += 2;
|
||
const manualTrim = data.readInt16LE(offset); // +/- Manually Trimmed Up/Down
|
||
offset += 2;
|
||
const miscStates = data.readUInt16LE(offset); // bit field
|
||
offset += 2;
|
||
const gateLevelSteps = data.readUInt16LE(offset); // 0 to 272 in steps of 1/32"
|
||
offset += 2;
|
||
const encoderPosition = data.readUInt16LE(offset); // Absolute Encoder position 0 to 10,000
|
||
offset += 2;
|
||
const cumulativeUptimeCpu = data.readUInt16LE(offset); // Total hours Uptime
|
||
offset += 2;
|
||
const softLevelTarget = data.readUInt16LE(offset); // 12 to 2000
|
||
offset += 2;
|
||
const pgtPGain = data.readUInt16LE(offset); // 0 to 65535
|
||
offset += 2;
|
||
const pgtGGain = data.readUInt16LE(offset); // 0 to 8000
|
||
offset += 2;
|
||
const pgtTolerance = data.readUInt16LE(offset); // 0 to 65535
|
||
offset += 2;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.IF2_DRY_GATE_38,
|
||
applicationMode,
|
||
taskMode,
|
||
appliedResolution,
|
||
machineState,
|
||
switchState: {
|
||
arm: (switchState & 0x01) ? 1 : 0,
|
||
fuselage: (switchState & 0x02) ? 1 : 0,
|
||
trigger: (switchState & 0x04) ? 1 : 0,
|
||
trim: (switchState & 0x08) ? 1 : 0
|
||
},
|
||
gateStatus: {
|
||
gateClosed: (gateStatus & 0x01) ? 1 : 0,
|
||
gateState: (gateStatus >> 1) & 0x03, // bits 1-2: 0=Fully Closed, 1=Open, 2=Soft Level
|
||
},
|
||
gateSoftState,
|
||
targetSpreadRate,
|
||
targetSpreadPerMin,
|
||
appliedSpreadRate,
|
||
appliedSpreadPerMin,
|
||
gpsTrim,
|
||
manualTrim,
|
||
miscStates: {
|
||
encoderMoved: (miscStates & 0x01) ? 1 : 0,
|
||
encoderOk: (miscStates & 0x02) ? 1 : 0,
|
||
hydroPumpOn: (miscStates & 0x04) ? 1 : 0,
|
||
hydroOpenSolenoidOn: (miscStates & 0x08) ? 1 : 0,
|
||
hydroCloseSolenoidOn: (miscStates & 0x10) ? 1 : 0
|
||
},
|
||
gateLevelSteps,
|
||
encoderPosition,
|
||
cumulativeUptimeCpu,
|
||
softLevelTarget,
|
||
pgtPGain,
|
||
pgtGGain,
|
||
pgtTolerance
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse IF2 Dry Gate Record (Type 38)
|
||
* 47 bytes total (43 data + 4 header)
|
||
*/
|
||
parseIF2DryGate_38(data, context) {
|
||
if (data.length < 43) return null;
|
||
|
||
let offset = 0;
|
||
const applicationMode = data.readUInt8(offset); // Application MODE
|
||
offset += 1;
|
||
const taskMode = data.readUInt8(offset); // TASK Mode
|
||
offset += 1;
|
||
const appliedResolution = data.readUInt8(offset); // Applied Resolution
|
||
offset += 1;
|
||
const machineState = data.readUInt8(offset); // Machine State
|
||
offset += 1;
|
||
const switchState = data.readUInt8(offset); // Switch State
|
||
offset += 1;
|
||
const gateStatus = data.readUInt8(offset); // Gate Status
|
||
offset += 1;
|
||
const gateSoftState = data.readUInt8(offset); // Gate SOFT State
|
||
offset += 1;
|
||
const targetSpreadRate = data.readFloatLE(offset); // Target Spread Rate (Kg/Ha)
|
||
offset += 4;
|
||
const targetSpreadPerMin = data.readFloatLE(offset); // Target Spread per min (not used)
|
||
offset += 4;
|
||
const appliedSpreadRate = data.readFloatLE(offset); // Applied Spread Rate (Kg/Ha)
|
||
offset += 4;
|
||
const appliedSpreadPerMin = data.readFloatLE(offset); // Applied Spread per min (not used)
|
||
offset += 4;
|
||
const gpsTrim = data.readInt16LE(offset); // GPS TRIM
|
||
offset += 2;
|
||
const manualTrim = data.readInt16LE(offset); // Manual TRIM
|
||
offset += 2;
|
||
|
||
// Part B
|
||
const miscStates = data.readUInt16LE(offset); // Misc States
|
||
offset += 2;
|
||
const gateLevelSteps = data.readUInt16LE(offset); // Gate Level Steps
|
||
offset += 2;
|
||
const encoderPosition = data.readUInt16LE(offset); // Encoder Position
|
||
offset += 2;
|
||
const cumulativeUptimeCPU = data.readUInt16LE(offset); // Cumulative Uptime CPU
|
||
offset += 2;
|
||
const softLevelTarget = data.readUInt16LE(offset); // SOFT Level Target
|
||
offset += 2;
|
||
const pgtPGain = data.readUInt16LE(offset); // PGT P Gain
|
||
offset += 2;
|
||
const pgtGGain = data.readUInt16LE(offset); // PGT G Gain
|
||
offset += 2;
|
||
const pgtTolerance = data.readUInt16LE(offset); // PGT Tolerance
|
||
offset += 2;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.IF2_DRY_GATE_38,
|
||
applicationMode,
|
||
taskMode,
|
||
appliedResolution,
|
||
machineState,
|
||
switchState,
|
||
gateStatus,
|
||
gateSoftState,
|
||
targetSpreadRate,
|
||
targetSpreadPerMin,
|
||
appliedSpreadRate,
|
||
appliedSpreadPerMin,
|
||
gpsTrim,
|
||
manualTrim,
|
||
miscStates,
|
||
gateLevelSteps,
|
||
encoderPosition,
|
||
cumulativeUptimeCPU,
|
||
softLevelTarget,
|
||
pgtPGain,
|
||
pgtGGain,
|
||
pgtTolerance
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse TLEG Dry Gate Record (Type 39)
|
||
* 44 bytes total (40 data + 4 header)
|
||
*/
|
||
parseTLEGDryGate_39(data, context) {
|
||
if (data.length < 40) return null;
|
||
|
||
let offset = 0;
|
||
const applicationOpMode = data.readUInt8(offset); // Application OP Mode
|
||
offset += 1;
|
||
const taskMode = data.readUInt8(offset); // 0=Single/Product Profiles, 1=Levels/FDG
|
||
offset += 1;
|
||
const appliedResolution = data.readUInt8(offset); // 0=1/32", 1=1/16"
|
||
offset += 1;
|
||
const machineState = data.readUInt8(offset); // 0 to 15
|
||
offset += 1;
|
||
const switchState = data.readUInt8(offset); // bit field
|
||
offset += 1;
|
||
const gateState = data.readUInt8(offset); // bit field
|
||
offset += 1;
|
||
const userSelectedGateClosedState = data.readUInt8(offset); // 0=Latched, 1=User SOFT
|
||
offset += 1;
|
||
const tlegInternalTemp = data.readUInt8(offset); // Internal temperature C
|
||
offset += 1;
|
||
const targetSpreadRate = data.readFloatLE(offset); // Kg/Ha
|
||
offset += 4;
|
||
const targetSpreadPerMin = data.readFloatLE(offset); // Kg per min (Not used)
|
||
offset += 4;
|
||
const appliedSpreadRate = data.readFloatLE(offset); // Kg/Ha
|
||
offset += 4;
|
||
const appliedSpreadPerMin = data.readFloatLE(offset); // Kg per min (Not used)
|
||
offset += 4;
|
||
const gpsTrim = data.readInt16LE(offset); // +/- GPS Trimmed Speed Up/Down
|
||
offset += 2;
|
||
const manualTrim = data.readInt16LE(offset); // +/- Manually Trimmed Up/Down
|
||
offset += 2;
|
||
const preGateLevelSteps = data.readUInt16LE(offset); // 0 to 158 in steps of 1/32"
|
||
offset += 2;
|
||
const gateLevelSteps = data.readUInt16LE(offset); // 0 to 158 in steps of 1/32"
|
||
offset += 2;
|
||
const encoderPosition = data.readUInt16LE(offset); // Internal TLEG Encoder 0.0° to 360.0° * 10
|
||
offset += 2;
|
||
const cumulativeUptimeCpu = data.readUInt16LE(offset); // Total hours Uptime
|
||
offset += 2;
|
||
const latchedTargetDegrees = data.readUInt16LE(offset); // 0.0° to 360.0° * 10
|
||
offset += 2;
|
||
const softTargetDegrees = data.readUInt16LE(offset); // 0.0° to 360.0° * 10
|
||
offset += 2;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.TLEG_DRY_GATE_39,
|
||
applicationOpMode,
|
||
taskMode,
|
||
appliedResolution,
|
||
machineState,
|
||
switchState: {
|
||
arm: (switchState & 0x01) ? 1 : 0,
|
||
trigger: (switchState & 0x02) ? 1 : 0,
|
||
fuselage: (switchState & 0x04) ? 1 : 0,
|
||
motor: (switchState & 0x08) ? 1 : 0,
|
||
sprayOn: (switchState & 0x10) ? 1 : 0,
|
||
gateMoving: (switchState & 0x20) ? 1 : 0,
|
||
encoderStatus: (switchState & 0x40) ? 'OK' : 'Error'
|
||
},
|
||
gateState: {
|
||
gateClosedState: (gateState & 0x01) ? 'Soft' : 'Latched',
|
||
gateOpen: (gateState & 0x02) ? 1 : 0,
|
||
gateJam: (gateState & 0x10) ? 1 : 0
|
||
},
|
||
userSelectedGateClosedState,
|
||
tlegInternalTemp,
|
||
targetSpreadRate,
|
||
targetSpreadPerMin,
|
||
appliedSpreadRate,
|
||
appliedSpreadPerMin,
|
||
gpsTrim,
|
||
manualTrim,
|
||
preGateLevelSteps,
|
||
gateLevelSteps,
|
||
encoderPosition: encoderPosition / 10.0, // Convert back to degrees
|
||
cumulativeUptimeCpu,
|
||
latchedTargetDegrees: latchedTargetDegrees / 10.0, // Convert back to degrees
|
||
softTargetDegrees: softTargetDegrees / 10.0 // Convert back to degrees
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse AgDisp Data Record (Type 43)
|
||
* 12 bytes total (8 data + 4 header)
|
||
*/
|
||
parseAgdispData_43(data, context) {
|
||
if (data.length < 8) return null;
|
||
|
||
let offset = 0;
|
||
const windOffsetDirection = data.readFloatLE(offset); // Degrees
|
||
offset += 4;
|
||
const appliedOffsetInMeters = data.readFloatLE(offset); // Meters
|
||
offset += 4;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.AGDISP_DATA_43,
|
||
windOffsetDirection,
|
||
appliedOffsetInMeters
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse Micro-RPM Record (Type 52)
|
||
* 25 bytes total (21 data + 4 header)
|
||
*/
|
||
parseMicroRPM_52(data, context) {
|
||
if (data.length < 21) return null;
|
||
|
||
let offset = 0;
|
||
const opMode = data.readUInt8(offset); // 0 or 1 (On/Off)
|
||
offset += 1;
|
||
const microAtomiserLeft1 = data.readInt16LE(offset); // RPM
|
||
offset += 2;
|
||
const microAtomiserLeft2 = data.readInt16LE(offset); // RPM
|
||
offset += 2;
|
||
const microAtomiserLeft3 = data.readInt16LE(offset); // RPM
|
||
offset += 2;
|
||
const microAtomiserLeft4 = data.readInt16LE(offset); // RPM
|
||
offset += 2;
|
||
const microAtomiserLeft5 = data.readInt16LE(offset); // RPM
|
||
offset += 2;
|
||
const microAtomiserRight1 = data.readInt16LE(offset); // RPM
|
||
offset += 2;
|
||
const microAtomiserRight2 = data.readInt16LE(offset); // RPM
|
||
offset += 2;
|
||
const microAtomiserRight3 = data.readInt16LE(offset); // RPM
|
||
offset += 2;
|
||
const microAtomiserRight4 = data.readInt16LE(offset); // RPM
|
||
offset += 2;
|
||
const microAtomiserRight5 = data.readInt16LE(offset); // RPM
|
||
offset += 2;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.MICRO_RPM_52,
|
||
opMode,
|
||
leftAtomisers: [
|
||
microAtomiserLeft1,
|
||
microAtomiserLeft2,
|
||
microAtomiserLeft3,
|
||
microAtomiserLeft4,
|
||
microAtomiserLeft5
|
||
],
|
||
rightAtomisers: [
|
||
microAtomiserRight1,
|
||
microAtomiserRight2,
|
||
microAtomiserRight3,
|
||
microAtomiserRight4,
|
||
microAtomiserRight5
|
||
]
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse SBC (CPU Temps) Record (Type 56)
|
||
* 20 bytes total (16 data + 4 header)
|
||
*/
|
||
parseSBCTemps_56(data, context) {
|
||
if (data.length < 16) return null;
|
||
|
||
let offset = 0;
|
||
const cpuTemp1 = data.readFloatLE(offset); // Degrees Celsius
|
||
offset += 4;
|
||
const cpuTemp2 = data.readFloatLE(offset); // Degrees Celsius
|
||
offset += 4;
|
||
const cpuTemp3 = data.readFloatLE(offset); // Degrees Celsius
|
||
offset += 4;
|
||
const cpuTemp4 = data.readFloatLE(offset); // Degrees Celsius
|
||
offset += 4;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.SBC_TEMPS_56,
|
||
cpuTemperatures: [cpuTemp1, cpuTemp2, cpuTemp3, cpuTemp4]
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse Meterate Record (Type 57)
|
||
* 25 bytes total (21 data + 4 header)
|
||
*/
|
||
parseMeterate_57(data, context) {
|
||
if (data.length < 21) return null;
|
||
|
||
let offset = 0;
|
||
const autoManual = data.readUInt8(offset); // Auto or Manual state
|
||
offset += 1;
|
||
const baseSpeed = data.readUInt8(offset); // MPH
|
||
offset += 1;
|
||
const everySpeed = data.readUInt16LE(offset); // MPH (* 100)
|
||
offset += 2;
|
||
const controlVoltage = data.readUInt16LE(offset); // Vdc (* 100)
|
||
offset += 2;
|
||
const tachRpm = data.readUInt16LE(offset); // RPM
|
||
offset += 2;
|
||
const stepsESpeed = data.readUInt8(offset); // RPM steps per E-Speed
|
||
offset += 1;
|
||
const targetSpreadRate = data.readUInt16LE(offset); // Kg/Ha (* 100)
|
||
offset += 2;
|
||
const targetSpreadPerMin = data.readUInt32LE(offset); // Kg per min (* 1000)
|
||
offset += 4;
|
||
const appliedSpreadRate = data.readUInt16LE(offset); // Kg/Ha (* 100)
|
||
offset += 2;
|
||
const appliedSpreadPerMin = data.readUInt32LE(offset); // Kg per min (* 1000)
|
||
offset += 4;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.METERATE_57,
|
||
autoManual,
|
||
baseSpeed,
|
||
everySpeed: everySpeed / 100.0, // Convert back from (* 100)
|
||
controlVoltage: controlVoltage / 100.0, // Convert back from (* 100)
|
||
tachRpm,
|
||
stepsESpeed,
|
||
targetSpreadRate: targetSpreadRate / 100.0, // Convert back from (* 100)
|
||
targetSpreadPerMin: targetSpreadPerMin / 1000.0, // Convert back from (* 1000)
|
||
appliedSpreadRate: appliedSpreadRate / 100.0, // Convert back from (* 100)
|
||
appliedSpreadPerMin: appliedSpreadPerMin / 1000.0 // Convert back from (* 1000)
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse Swathing Setup Record (Type 120)
|
||
* Variable length: 21 or 52 bytes (17 or 48 data + 4 header)
|
||
*/
|
||
parseSwathingSetup_120(data, context) {
|
||
if (data.length < 17) return null;
|
||
|
||
let offset = 0;
|
||
const jobId = this.extractNullTerminatedString(data.slice(offset, offset + 11));
|
||
offset += 11;
|
||
const patternType = data.readUInt8(offset); // see Table 2
|
||
offset += 1;
|
||
const patternLR = String.fromCharCode(data.readUInt8(offset)); // 'L' | 'R'
|
||
offset += 1;
|
||
const swathWidth = data.readFloatLE(offset); // meters
|
||
offset += 4;
|
||
|
||
const result = {
|
||
recordType: RECORD_TYPES.SWATHING_SETUP_120,
|
||
jobId,
|
||
patternType,
|
||
patternLR,
|
||
swathWidth
|
||
};
|
||
|
||
// Check for Job Long Label Name (optional 31 bytes)
|
||
if (data.length >= 48) {
|
||
const jobLongLabelName = this.extractNullTerminatedString(data.slice(offset, offset + 31));
|
||
result.jobLongLabelName = jobLongLabelName;
|
||
}
|
||
|
||
return result;
|
||
}
|
||
|
||
/**
|
||
* Parse Flow Setup Record (Type 140)
|
||
* 23 bytes total (19 data + 4 header)
|
||
*/
|
||
parseFlowSetup_140(data, context) {
|
||
if (data.length < 19) return null;
|
||
|
||
let offset = 0;
|
||
const flowControlStatus = data.readUInt8(offset);
|
||
offset += 1;
|
||
const totalSprayLiters = data.readFloatLE(offset); // liters
|
||
offset += 4;
|
||
const valveCalibration = data.readInt16LE(offset);
|
||
offset += 2;
|
||
const meterCalibration = data.readFloatLE(offset); // counts/liter
|
||
offset += 4;
|
||
const applicationPerArea = data.readFloatLE(offset); // liters/hectare
|
||
offset += 4;
|
||
const applicationRate = data.readFloatLE(offset); // liters/minute
|
||
offset += 4;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.FLOW_SETUP_140,
|
||
flowControlStatus: {
|
||
mode: flowControlStatus & 0x03, // 0=OFF, 1=Control ON, 2=Monitor Only
|
||
variable: (flowControlStatus & 0x40) ? true : false, // +0x40 = Variable, else Constant
|
||
dry: (flowControlStatus & 0x80) ? true : false // +0x80 = DRY, else WET
|
||
},
|
||
totalSprayLiters,
|
||
valveCalibration,
|
||
meterCalibration,
|
||
applicationPerArea,
|
||
applicationRate
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse Boom Sections Record (Type 142)
|
||
* Total Length: 31 bytes (27 data + 4 header)
|
||
* Per spec: NO timestamp in this record type
|
||
*/
|
||
parseBoomSections_142(data, context) {
|
||
if (data.length < 27) return null;
|
||
|
||
let offset = 0;
|
||
const boomState = data.readUInt8(offset); // 0=Manual, 1=Automatic
|
||
offset += 1;
|
||
|
||
const boomSections = data.readUInt8(offset); // 1, 3, 4, or 5
|
||
offset += 1;
|
||
|
||
const boomValveStates = data.readUInt8(offset); // bit field: 2 or 3 valve states O/C
|
||
offset += 1;
|
||
|
||
const farLeftSection = data.readUInt32LE(offset); // meters ×1000 m
|
||
offset += 4;
|
||
|
||
const leftCenterSection = data.readUInt32LE(offset); // meters ×1000 m
|
||
offset += 4;
|
||
|
||
const leftSection = data.readUInt32LE(offset); // meters ×1000 m
|
||
offset += 4;
|
||
|
||
const centerSection = data.readUInt32LE(offset); // meters ×1000 m
|
||
offset += 4;
|
||
|
||
const rightSection = data.readUInt32LE(offset); // meters ×1000 m
|
||
offset += 4;
|
||
|
||
const farRightSection = data.readUInt32LE(offset); // meters ×1000 m
|
||
offset += 4;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.BOOM_SECTIONS_142,
|
||
boomState,
|
||
boomSections,
|
||
boomValveStates,
|
||
farLeftSection,
|
||
leftCenterSection,
|
||
leftSection,
|
||
centerSection,
|
||
rightSection,
|
||
farRightSection
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse Job Info NAME String Record (Type 152)
|
||
* Used ONLY with Falcon and G4 logs
|
||
* Total Length: 42 bytes (38 data + 4 header)
|
||
* Per spec: NO timestamp in this record type
|
||
*/
|
||
parseJobInfoNameString_152(data, context) {
|
||
if (data.length < 38) return null;
|
||
|
||
let offset = 0;
|
||
const jobVersionId = data.readInt16LE(offset); // Job Version ID (2 bytes)
|
||
offset += 2;
|
||
|
||
const jobFileName = this.extractNullTerminatedString(data.slice(offset, offset + 32)); // Job File Long Name (32 bytes ASCIIZ)
|
||
offset += 32;
|
||
|
||
const numberOfPolygons = data.readInt16LE(offset); // Number of Polygons (2 bytes)
|
||
offset += 2;
|
||
|
||
const numberOfPatterns = data.readInt16LE(offset); // Number of Patterns (2 bytes)
|
||
offset += 2;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.JOB_INFO_NAME_STRING_152,
|
||
jobVersionId,
|
||
jobFileName,
|
||
numberOfPolygons,
|
||
numberOfPatterns
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse System Setup Record (Type 100)
|
||
* 43 bytes total (39 data + 4 header)
|
||
*/
|
||
parseSystemSetup_100(data, context) {
|
||
if (data.length < 39) return null;
|
||
|
||
let offset = 0;
|
||
const timestamp = this.parseTimestamp(data, offset);
|
||
offset += 5;
|
||
const pilotName = this.extractNullTerminatedString(data.slice(offset, offset + 11));
|
||
offset += 11;
|
||
const aircraftId = this.extractNullTerminatedString(data.slice(offset, offset + 11));
|
||
offset += 11;
|
||
const loggingInterval = data.readUInt8(offset); // seconds*10
|
||
offset += 1;
|
||
const loggingMinSpeed = data.readFloatLE(offset); // m/sec
|
||
offset += 4;
|
||
const gpsMaskAngle = data.readUInt8(offset); // degrees
|
||
offset += 1;
|
||
const gmtOffset = data.readInt16LE(offset); // minutes
|
||
offset += 2;
|
||
const compassVariation = data.readFloatLE(offset); // degrees
|
||
offset += 4;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.SYSTEM_SETUP_100,
|
||
timestamp,
|
||
pilotName,
|
||
aircraftId,
|
||
loggingInterval: loggingInterval / 10.0, // Convert back to seconds
|
||
loggingMinSpeed,
|
||
gpsMaskAngle,
|
||
gmtOffset,
|
||
compassVariation
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse GPS Record (Type 10)
|
||
* Contains: GDOP, Satellite count, DGPS station info, AIMMS data
|
||
*/
|
||
parseGPS_10(data, context) {
|
||
if (data.length < 10) return null; // Minimum 10 bytes for basic GPS record
|
||
|
||
let offset = 0;
|
||
const gdop = data.readFloatLE(offset);
|
||
offset += 4;
|
||
const satellitesByte = data.readUInt8(offset); // Packed: (# tracked << 4) + # used
|
||
offset += 1;
|
||
const dgpsStationId = data.readInt16LE(offset);
|
||
offset += 2;
|
||
|
||
// Decode satellites byte: upper 4 bits = tracked, lower 4 bits = used
|
||
const satellitesTracked = (satellitesByte >> 4) & 0x0F;
|
||
const satellitesUsed = satellitesByte & 0x0F;
|
||
|
||
const result = {
|
||
recordType: RECORD_TYPES.GPS_10,
|
||
gdop,
|
||
satellitesTracked, // Number of satellites tracked
|
||
satellitesUsed, // Number of satellites used in solution
|
||
dgpsStationId
|
||
};
|
||
if (data.length >= 10) {
|
||
result.aimmsNavSource = data.readUInt8(offset); // 0 = IMU, 1 = GPS
|
||
offset += 1;
|
||
result.aimmsSvInGpsSolution = data.readUInt8(offset);
|
||
offset += 1;
|
||
result.aimmsGpsPosType = data.readUInt8(offset); // 16=SPS, 18=WAAS, 19=Extrapolated, 0=None
|
||
offset += 1;
|
||
}
|
||
|
||
return result;
|
||
}
|
||
|
||
/**
|
||
* Parse Swath Number Record (Type 20)
|
||
*/
|
||
/**
|
||
* Parse Swath Number Record (Type 20)
|
||
* 6 bytes total (2 data + 4 header)
|
||
*/
|
||
parseSwathNumber_20(data, context) {
|
||
if (data.length < 2) return null;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.SWATH_NUMBER_20,
|
||
swathNumber: data.readInt16LE(0) // A-B=1, right: 2,3,4..., left: -2,-3,-4...
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse Flow Monitor/Control Record (Type 30)
|
||
* 10 bytes total (6 data + 4 header) - valve position may be optional
|
||
*/
|
||
parseFlowMonitor_30(data, context) {
|
||
if (data.length < 4) return null; // Minimum for flow rate
|
||
|
||
let offset = 0;
|
||
const flowRate = data.readFloatLE(offset); // liters/minute
|
||
offset += 4;
|
||
|
||
const result = {
|
||
recordType: RECORD_TYPES.FLOW_MONITOR_30,
|
||
flowRate
|
||
};
|
||
|
||
// Valve position may or may not exist (legacy support)
|
||
if (data.length >= 6) {
|
||
result.valvePosition = data.readInt16LE(offset);
|
||
}
|
||
|
||
return result;
|
||
}
|
||
|
||
/**
|
||
* Parse Target Application Rates Record (Type 32)
|
||
* Total Length: 9 bytes (4 header + 5 data)
|
||
* Per spec: NO timestamp in this record type
|
||
*/
|
||
parseTargetApplicationRates_32(data, context) {
|
||
if (data.length < 5) return null;
|
||
|
||
let offset = 0;
|
||
const targetRate = data.readFloatLE(offset); // Target Rate LPM (L/min)
|
||
offset += 4;
|
||
const flags = data.readUInt8(offset); // BOOM 0 = Off, 1 = ON (Flow)
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.TARGET_APPLICATION_RATES_32,
|
||
targetRate, // Always in L/min according to specification
|
||
flags
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse Applied Rates Record (Type 36)
|
||
* Variable length: 2 + 6 × number_of_channels
|
||
* Per spec: NO timestamp in this record type
|
||
*/
|
||
parseAppliedRates_36(data, context) {
|
||
if (data.length < 2) return null;
|
||
|
||
let offset = 0;
|
||
const numberOfChannels = data.readUInt16LE(offset); // 2 bytes as per spec
|
||
offset += 2;
|
||
|
||
if (numberOfChannels < 0 || numberOfChannels > 41) return null;
|
||
if (data.length < 2 + (6 * numberOfChannels)) return null; // 2 bytes units + 4 bytes rate = 6 per channel
|
||
|
||
const channels = [];
|
||
for (let i = 0; i < numberOfChannels; i++) {
|
||
const units = data.readUInt16LE(offset); // Application units ID (2 bytes per spec)
|
||
offset += 2;
|
||
const rate = data.readFloatLE(offset); // Actual application rate (4 bytes)
|
||
offset += 4;
|
||
|
||
channels.push({
|
||
channelIndex: i + 1,
|
||
units,
|
||
appliedRate: rate
|
||
});
|
||
}
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.APPLIED_RATES_36,
|
||
numberOfChannels,
|
||
channels
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse Wind Record (Type 50)
|
||
* 10 bytes total (6 data + 4 header)
|
||
*/
|
||
parseWind_50(data, context) {
|
||
if (data.length < 6) return null;
|
||
|
||
let offset = 0;
|
||
const windDirection = data.readInt16LE(offset); // degrees
|
||
offset += 2;
|
||
const windVelocity = data.readFloatLE(offset); // m/sec
|
||
offset += 4;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.WIND_50,
|
||
windDirection,
|
||
windSpeed: windVelocity // Alias for compatibility
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse Marker ASCII Record (Type 60)
|
||
* Variable length: 26 + label length
|
||
*/
|
||
parseMarkerASCII_60(data, context) {
|
||
if (data.length < 22) return null; // Updated: no timestamp, minimum 22 bytes
|
||
|
||
let offset = 0;
|
||
// No timestamp in this record according to spec
|
||
const markerType = data.readUInt8(offset);
|
||
offset += 1;
|
||
const latitude = data.readDoubleLE(offset);
|
||
offset += 8;
|
||
const longitude = data.readDoubleLE(offset);
|
||
offset += 8;
|
||
const altitude = data.readFloatLE(offset);
|
||
offset += 4;
|
||
|
||
if (offset >= data.length) {
|
||
// No label
|
||
return {
|
||
recordType: RECORD_TYPES.MARKER_ASCII_60,
|
||
markerType,
|
||
latitude,
|
||
longitude,
|
||
altitude,
|
||
labelLength: 0,
|
||
text: ''
|
||
};
|
||
}
|
||
|
||
const labelLength = data.readUInt8(offset);
|
||
offset += 1;
|
||
|
||
let labelText = '';
|
||
if (labelLength > 0 && offset < data.length) {
|
||
const labelBytes = data.slice(offset, offset + labelLength);
|
||
labelText = this.extractNullTerminatedString(labelBytes);
|
||
}
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.MARKER_ASCII_60,
|
||
markerType,
|
||
latitude,
|
||
longitude,
|
||
altitude,
|
||
labelLength,
|
||
text: labelText
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse Marker Unicode Record (Type 61)
|
||
* Variable length: 26 + label length
|
||
*/
|
||
parseMarkerUnicode_61(data, context) {
|
||
if (data.length < 22) return null; // Updated: no timestamp, minimum 22 bytes
|
||
|
||
let offset = 0;
|
||
// No timestamp in this record according to spec
|
||
const markerType = data.readUInt8(offset);
|
||
offset += 1;
|
||
const latitude = data.readDoubleLE(offset);
|
||
offset += 8;
|
||
const longitude = data.readDoubleLE(offset);
|
||
offset += 8;
|
||
const altitude = data.readFloatLE(offset);
|
||
offset += 4;
|
||
|
||
if (offset >= data.length) {
|
||
// No label
|
||
return {
|
||
recordType: RECORD_TYPES.MARKER_UNICODE_61,
|
||
markerType,
|
||
latitude,
|
||
longitude,
|
||
altitude,
|
||
labelLength: 0,
|
||
text: ''
|
||
};
|
||
}
|
||
|
||
const labelLength = data.readUInt8(offset);
|
||
offset += 1;
|
||
|
||
let labelText = '';
|
||
if (labelLength > 0 && offset < data.length) {
|
||
const labelBytes = data.slice(offset, offset + labelLength);
|
||
labelText = labelBytes.toString('utf16le');
|
||
// Remove null termination
|
||
labelText = labelText.replace(/\0.*$/, '');
|
||
}
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.MARKER_UNICODE_61,
|
||
markerType,
|
||
latitude,
|
||
longitude,
|
||
altitude,
|
||
labelLength,
|
||
text: labelText
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse GPS Status Extended Record (Type 11)
|
||
* 27 bytes total (23 data + 4 header) - Not used at this time May/2020
|
||
*/
|
||
parseGPSStatusExtended_11(data, context) {
|
||
if (data.length < 23) return null;
|
||
|
||
let offset = 0;
|
||
const navMode = data.readUInt16LE(offset);
|
||
offset += 2;
|
||
const ageOfDifferential = data.readUInt16LE(offset);
|
||
offset += 2;
|
||
const reserved1 = data.readUInt32LE(offset);
|
||
offset += 4;
|
||
const reserved2 = data.readUInt32LE(offset);
|
||
offset += 4;
|
||
const gdop = data.readFloatLE(offset);
|
||
offset += 4;
|
||
const hdop = data.readFloatLE(offset);
|
||
offset += 4;
|
||
const satellitesByte = data.readUInt8(offset); // Packed: (# tracked << 4) + # used
|
||
offset += 1;
|
||
const dgpsStationId = data.readUInt16LE(offset);
|
||
offset += 2;
|
||
|
||
// Decode satellites byte: upper 4 bits = tracked, lower 4 bits = used
|
||
const satellitesTracked = (satellitesByte >> 4) & 0x0F;
|
||
const satellitesUsed = satellitesByte & 0x0F;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.GPS_STATUS_EXTENDED_11,
|
||
navMode,
|
||
ageOfDifferential,
|
||
reserved1,
|
||
gdop,
|
||
hdop,
|
||
satellitesTracked, // Number of satellites tracked
|
||
satellitesUsed, // Number of satellites used in solution
|
||
dgpsStationId
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse Dual Flow Monitor/Control Record (Type 31) - Deprecated
|
||
* 16 bytes total (12 data + 4 header)
|
||
*/
|
||
parseDualFlowMonitor_31(data, context) {
|
||
if (data.length < 12) return null;
|
||
|
||
let offset = 0;
|
||
const primaryFlowRate = data.readFloatLE(offset);
|
||
offset += 4;
|
||
const secondaryFlowRate = data.readFloatLE(offset);
|
||
offset += 4;
|
||
const primaryValvePosition = data.readInt16LE(offset);
|
||
offset += 2;
|
||
const secondaryValvePosition = data.readInt16LE(offset);
|
||
offset += 2;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.DUAL_FLOW_MONITOR_31,
|
||
primaryFlowRate,
|
||
secondaryFlowRate,
|
||
primaryValvePosition,
|
||
secondaryValvePosition
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse TLEG Dry Gate Record (Type 39)
|
||
* 44 bytes total (40 data + 4 header)
|
||
*/
|
||
parseTLEGDryGateRecord(data, context) {
|
||
if (data.length < 40) return null;
|
||
|
||
let offset = 0;
|
||
const applicationOpMode = data.readUInt8(offset);
|
||
offset += 1;
|
||
const taskMode = data.readUInt8(offset);
|
||
offset += 1;
|
||
const appliedResolution = data.readUInt8(offset);
|
||
offset += 1;
|
||
const machineState = data.readUInt8(offset);
|
||
offset += 1;
|
||
const switchState = data.readUInt8(offset);
|
||
offset += 1;
|
||
const gateState = data.readUInt8(offset);
|
||
offset += 1;
|
||
const userSelectedGateClosedState = data.readUInt8(offset);
|
||
offset += 1;
|
||
const tlegInternalTemp = data.readUInt8(offset);
|
||
offset += 1;
|
||
|
||
const targetSpreadRate = data.readFloatLE(offset);
|
||
offset += 4;
|
||
const targetSpreadPerMin = data.readFloatLE(offset);
|
||
offset += 4;
|
||
const appliedSpreadRate = data.readFloatLE(offset);
|
||
offset += 4;
|
||
const appliedSpreadPerMin = data.readFloatLE(offset);
|
||
offset += 4;
|
||
|
||
const gpsTrim = data.readInt16LE(offset);
|
||
offset += 2;
|
||
const manualTrim = data.readInt16LE(offset);
|
||
offset += 2;
|
||
const preGateLevelSteps = data.readUInt16LE(offset);
|
||
offset += 2;
|
||
const gateLevelSteps = data.readUInt16LE(offset);
|
||
offset += 2;
|
||
const encoderPosition = data.readUInt16LE(offset);
|
||
offset += 2;
|
||
const cumulativeUptimeCpu = data.readUInt16LE(offset);
|
||
offset += 2;
|
||
const latchedTargetDegrees = data.readUInt16LE(offset);
|
||
offset += 2;
|
||
const softTargetDegrees = data.readUInt16LE(offset);
|
||
offset += 2;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.TLEG_DRY_GATE_39,
|
||
applicationOpMode,
|
||
taskMode,
|
||
appliedResolution,
|
||
machineState,
|
||
switchState,
|
||
gateState,
|
||
userSelectedGateClosedState,
|
||
tlegInternalTemp,
|
||
targetSpreadRate,
|
||
targetSpreadPerMin,
|
||
appliedSpreadRate,
|
||
appliedSpreadPerMin,
|
||
gpsTrim,
|
||
manualTrim,
|
||
preGateLevelSteps,
|
||
gateLevelSteps,
|
||
encoderPosition,
|
||
cumulativeUptimeCpu,
|
||
latchedTargetDegrees,
|
||
softTargetDegrees
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse TACH Times Record (Type 45)
|
||
* 12 bytes total (8 data + 4 header)
|
||
*/
|
||
parseTachTimes_45(data, context) {
|
||
if (data.length < 8) return null;
|
||
|
||
let offset = 0;
|
||
const totalTachCurrentTime = data.readUInt32LE(offset);
|
||
offset += 4;
|
||
const totalTachTotalTime = data.readUInt32LE(offset);
|
||
offset += 4;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.TACH_TIMES_45,
|
||
totalTachCurrentTime,
|
||
totalTachTotalTime
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse Controller TYPE by Name Record (Type 46)
|
||
* 25 bytes total (21 data + 4 header)
|
||
*/
|
||
parseControllerTypeByName_46(data, context) {
|
||
if (data.length < 21) return null;
|
||
|
||
const controllerType = this.extractNullTerminatedString(data.slice(0, 21));
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.CONTROLLER_TYPE_BY_NAME_46,
|
||
controllerType
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse IF2 Liquid BOOM Pressure Record (Type 47)
|
||
* 12 bytes total (8 data + 4 header)
|
||
*/
|
||
parseIF2LiquidBoomPressure_47(data, context) {
|
||
if (data.length < 8) return null;
|
||
|
||
let offset = 0;
|
||
const if2LiqPriBoomPressure = data.readFloatLE(offset); // Lbs pressure
|
||
offset += 4;
|
||
const if2LiqDualBoomPressure = data.readFloatLE(offset); // Lbs pressure
|
||
offset += 4;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.IF2_LIQUID_BOOM_PRESSURE_47,
|
||
if2LiqPriBoomPressure,
|
||
if2LiqDualBoomPressure
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse Laser Altimeter Record (Type 42)
|
||
* 8 bytes total (4 data + 4 header)
|
||
*/
|
||
parseLaserAltimeter_42(data, context) {
|
||
if (data.length < 4) return null;
|
||
|
||
let offset = 0;
|
||
const heightAgl = data.readFloatLE(offset); // meters
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.LASER_ALTIMETER_42,
|
||
heightAgl
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse Environmental Record (Type 110)
|
||
* 13 bytes total (9 data + 4 header)
|
||
*/
|
||
parseEnvironmental_110(data, context) {
|
||
if (data.length < 9) return null;
|
||
|
||
let offset = 0;
|
||
const temperature = data.readFloatLE(offset); // °C
|
||
offset += 4;
|
||
const relativeHumidity = data.readUInt8(offset); // % humidity
|
||
offset += 1;
|
||
const barometricPressure = data.readFloatLE(offset); // kPsc
|
||
offset += 4;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.ENVIRONMENTAL_110,
|
||
temperature,
|
||
relativeHumidity,
|
||
barometricPressure
|
||
};
|
||
}
|
||
|
||
/**
|
||
* Parse Job Info Record (Type 151)
|
||
*/
|
||
parseJobInfoString_151(data, context) {
|
||
if (data.length < 39) return null;
|
||
|
||
let offset = 0;
|
||
const jobId = data.readUInt32LE(offset);
|
||
offset += 4;
|
||
|
||
// Job title is 30 characters, null-terminated
|
||
const jobTitle = this.extractNullTerminatedString(data.slice(offset, offset + 30));
|
||
offset += 30;
|
||
|
||
const numberOfPolygons = data.readUInt16LE(offset);
|
||
offset += 2;
|
||
const numberOfPatterns = data.readUInt16LE(offset);
|
||
offset += 2;
|
||
|
||
return {
|
||
recordType: RECORD_TYPES.JOB_INFO_STRING_151,
|
||
jobId,
|
||
jobTitle,
|
||
numberOfPolygons,
|
||
numberOfPatterns
|
||
};
|
||
}
|
||
|
||
/**
 * Parse 5-byte timestamp from SatLoc format according to LOGFileFormat_Air_3_76.md
 * Returns date components (not a Date) to avoid timezone interpretation issues.
 * Validates components and returns null if invalid to prevent NaN timestamps.
 * Handles rollover case for legacy 4-bit year encoding.
 *
 * Format:
 *   Byte 4 = (Y<<4) + Month, where Y is year-1993
 *   4 bytes = ((Y>>4)<<29) + (Day<<24) + (Hour<<19) + (Minute<<13) + (Seconds<<7) + Hundredths
 *
 * Rollover handling:
 *   - Modern format: uses 7 bits for year (3 high + 4 low), valid 1993-2120
 *   - Legacy format: uses 4 bits for year (only low), valid 1993-2008, then rolls over
 *   - Example: year 2009 in legacy format appears as 1993 (0+1993); detected and corrected below
 *
 * @param {Buffer} data - Buffer containing the timestamp
 * @param {number} offset - Byte offset of the 5-byte timestamp within `data`
 * @returns {{year:number,month:number,day:number,hour:number,minute:number,seconds:number,milliseconds:number}|null}
 *   Local-time components (month 1-12), or null if truncated/invalid
 */
parseTimestamp(data, offset) {
  // Need the full 5 bytes: 1 year/month byte + 4-byte packed time value.
  if (data.length < offset + 5) return null;

  // Byte 4 (first byte): Y (year low, 4 bits) + Month (4 bits)
  const byte4 = data[offset];
  const yearLow4 = (byte4 >> 4) & 0x0F; // Y low 4 bits (year - 1993)
  const month = byte4 & 0x0F; // Month (4 bits), 1-12

  // Bytes 3-0 (4 bytes): read as little-endian 32-bit value.
  // Formula: ((Y >> 4) << 29) + (Day << 24) + (Hour << 19) + (Minute << 13) + (Seconds << 7) + Hundredths
  const timeValue = data.readUInt32LE(offset + 1);

  // Unpack fields by shifting back out of the 32-bit value (widths per spec).
  const yearHigh3 = (timeValue >> 29) & 0x07; // Y high 3 bits (year - 1993)
  const day = (timeValue >> 24) & 0x1F; // Day (5 bits)
  const hour = (timeValue >> 19) & 0x1F; // Hour (5 bits)
  const minute = (timeValue >> 13) & 0x3F; // Minute (6 bits)
  const seconds = (timeValue >> 7) & 0x3F; // Seconds (6 bits)
  const hundredths = timeValue & 0x7F; // Hundredths (7 bits)

  // Reconstruct full year offset: high 3 bits above the low 4 bits.
  let yearOffset = (yearHigh3 << 4) | yearLow4;

  // Handle rollover case for legacy 4-bit year encoding.
  // If the high 3 bits are 0, this might be the legacy format (valid 1993-2008).
  // Spec: "If the top three bits of byte 3 are used, this is valid to 2120.
  // If not, it will roll over after 2008."
  if (yearHigh3 === 0 && yearLow4 <= 15) {
    // Legacy 4-bit encoding suspected - yearLow4 is (year - 1993) in 0-15,
    // covering 1993-2008; after 2008 the value wraps around.

    // NOTE: heuristic depends on the wall clock at parse time, so the same
    // bytes can decode differently years apart — by design, to keep old
    // logger output plausible.
    const currentYear = new Date().getFullYear();
    const legacyYear = yearLow4 + 1993; // 1993-2008

    // Only apply rollover correction if:
    // 1. The parsed year is significantly older than the current year (> 15 years)
    // 2. AND we're in a time period where rollover is possible (2009 onward)
    const yearDifference = currentYear - legacyYear;
    const isLikelyRollover = yearDifference > 15 && currentYear >= 2009;

    if (isLikelyRollover) {
      // Apply rollover: add 16 to move into the next 16-year cycle.
      // Maps 0->16, 1->17, ..., 15->31, i.e. 2009-2024 for the first cycle.
      // NOTE(review): only one cycle is corrected; legacy stamps written
      // after 2024 would need +32 — confirm whether that can occur.
      yearOffset = yearLow4 + 16;
    }
  }

  const year = yearOffset + 1993;

  // Validate components to prevent invalid timestamps.
  // Year range is extended to 2120 to accommodate the modern 7-bit encoding.
  const isValid = year >= 1993 && year <= 2120 &&
    month >= 1 && month <= 12 &&
    day >= 1 && day <= 31 &&
    hour >= 0 && hour <= 23 &&
    minute >= 0 && minute <= 59 &&
    seconds >= 0 && seconds <= 59 &&
    hundredths >= 0 && hundredths <= 99;
  if (!isValid) {
    // Return null for invalid timestamps instead of invalid date components
    return null;
  }

  // Return date components instead of a Date object to avoid timezone issues
  return {
    year,
    month,
    day,
    hour,
    minute,
    seconds,
    milliseconds: hundredths * 10
  };
}
/**
|
||
* Create ApplicationDetail record from position data and accumulated context
|
||
* Updated mapping based on SATLOC_TO_APPLICATIONDETAIL_MAPPING.csv
|
||
*/
|
||
createApplicationDetail(positionRecord, fileContext, context = {}) {
|
||
const { currentGPS, currentFlow, currentFlowSetup, currentWind, currentSwath, currentSwathing, currentEnvironmental, currentLaser, currentAppliedRate, currentTargetRate, currentPressure, currentControllerType, currentTach, currentAgdisp, currentSystemSetup } = context;
|
||
|
||
// Extract job information for matching - prioritize filename-based job ID
|
||
const filenameJobId = fileContext.filenameJobId || null;
|
||
const swathingJobId = currentSwathing?.jobId || null;
|
||
const jobLongLabelName = currentSwathing?.jobLongLabelName || null;
|
||
|
||
// Use filename job ID as primary, fall back to jobLongLabelName from Swathing Setup (120)
|
||
const satlocJobId = filenameJobId || jobLongLabelName;
|
||
const aircraftId = currentSystemSetup?.aircraftId || null;
|
||
// Enhanced: boomControlStatus bit 0 = boom on/off. Short: Numeric value: 0 - Boom Off, 2 - Boom On
|
||
const sprayStat = (positionRecord.isEnhanced ? (positionRecord.boomControlStatus & 0x01)
|
||
: (positionRecord.flags == 2)) ? 1 : 0;
|
||
|
||
const appDetail = {
|
||
// Context data
|
||
fileId: fileContext.fileId,
|
||
|
||
// GPS/Position data from Position record (Type 1) - mapped from CSV
|
||
gpsTime: positionRecord.timestamp ?
|
||
(() => {
|
||
|
||
// SatLoc timestamp is local time - create local moment from components, then convert to UTC
|
||
const utcMoment = moment.utc({
|
||
year: positionRecord.timestamp.year,
|
||
month: positionRecord.timestamp.month - 1, // moment expects 0-indexed month
|
||
date: positionRecord.timestamp.day,
|
||
hour: positionRecord.timestamp.hour,
|
||
minute: positionRecord.timestamp.minute,
|
||
second: positionRecord.timestamp.seconds,
|
||
millisecond: positionRecord.timestamp.milliseconds
|
||
}).subtract(currentSystemSetup?.gmtOffset || 0, 'minutes'); // Adjust for GMT offset to get UTC time
|
||
|
||
// Preserve millisecond precision like job worker does
|
||
// Calculate total seconds including milliseconds (similar to utils.timeToSeconds + fixedTo)
|
||
const totalSeconds = utcMoment.unix() + (utcMoment.milliseconds() / 1000);
|
||
return fixedTo(totalSeconds, 3); // 3 decimal places for millisecond precision, mostly centisecond only
|
||
})() : 0,
|
||
lat: positionRecord.lat || 0, // lat -> lat
|
||
lon: positionRecord.lon || 0, // lon -> lon
|
||
tslu: positionRecord.differentialAge || 0, // differentialAge -> tslu (Time since last update)
|
||
xTrack: positionRecord.xTrack || 0, // xTrack -> xTrack
|
||
grSpeed: positionRecord.speed, // fixedTo(positionRecord.speed || 0, 2), // speed -> grSpeed (2 decimal places)
|
||
alt: positionRecord.altitude || 0, // altitude -> alt
|
||
// gpsAlt: positionRecord.altitude || 0, // altitude -> gpsAlt (same source, used for AGNAV RPM pkg only)
|
||
sprayStat: sprayStat,
|
||
head: positionRecord.track, // fixedTo(positionRecord.track || 0, 1), // track -> head (heading in degrees, 1 decimal place)
|
||
|
||
// Swath data - swath width from Swathing Setup record (Type 120)
|
||
swath: currentSwathing?.swathWidth, // fixedTo(currentSwathing?.swathWidth || 0, 1), // swathWidth -> swath (1 decimal place)
|
||
|
||
// GPS quality data from GPS record (Type 10)
|
||
satCount: currentGPS?.satellitesTracked || 0, // satellitesTracked -> satCount
|
||
|
||
// Flow data prioritized from Enhanced Position (1)-> Target Rates (32)-> Flow Monitor (30)-> Flow Setup (140)
|
||
// Get all application and target rates
|
||
...this.getFlowRates(positionRecord, currentFlow, currentFlowSetup, currentAppliedRate, currentTargetRate, currentSwathing?.swathWidth),
|
||
|
||
// Controller type from Controller Type By Name record (Type 46)
|
||
// sens: currentControllerType?.controllerType || '', // controllerType -> sens (string)
|
||
|
||
// Environmental data from Wind record (Type 50)
|
||
windSpd: currentWind?.windSpeed || 0, // windSpeed -> windSpd
|
||
windDir: currentWind?.windDirection || 0, // windDirection -> windDir
|
||
|
||
// Environmental data from Environmental record (Type 110)
|
||
temp: currentEnvironmental?.temperature || 0, // temperature -> temp
|
||
humid: fixedTo(currentEnvironmental?.relativeHumidity || 0, 0), // humidity -> humid (0 decimal places)
|
||
// Convert kPsc to Psi (1 kPsc = 0.14503773773 Psi)
|
||
baroPsi: (currentEnvironmental?.barometricPressure || 0) * 0.14503773773, // barometricPressure -> baroPsi
|
||
|
||
// Valve position from Enhanced Position (Priority 1) or Flow Monitor (Priority 2)
|
||
valvePos: positionRecord.valvePosition || currentFlow?.valvePosition || 0, // valvePosition -> valvePos
|
||
|
||
// Laser altimeter data from Laser Altimeter record (Type 42)
|
||
raserAlt: currentLaser?.laserAltitude || 0, // laserAltitude -> raserAlt
|
||
|
||
// System pressure from IF2 Liquid BOOM Pressure (Type 47) or fallback to position record pressure
|
||
// (1 Lbs = 1 Psi for pressure)
|
||
psi: (currentPressure ? currentPressure?.primaryPressure : currentPressure?.dualPressure) || 0, // if2LiqPriBoomPressure -> psi
|
||
|
||
// System setup data from System Setup record (Type 100)
|
||
// Note: sprayWidth is NOT in the SatLoc spec for Type 100, would need to come from another source
|
||
gmtOffset: currentSystemSetup?.gmtOffset || 0, // gmtOffset -> gmtOffset
|
||
|
||
// Tach data from Tach Times record (Type 45)
|
||
tachSec: currentTach?.totalTachCurrentTime || 0, // totalTachCurrentTime -> tachSec
|
||
tachTotalSec: currentTach?.totalTachTotalTime || 0, // totalTachTotalTime -> tachTotalSec
|
||
|
||
// AgDisp data from AgDisp Data record (Type 43)
|
||
windOffsetDir: currentAgdisp?.windOffsetDirection || 0, // windOffsetDirection -> windOffsetDir
|
||
appWindOffset: currentAgdisp?.appliedOffsetInMeters || 0, // appliedOffsetInMeters -> appWindOffset
|
||
|
||
// Fields not available in SatLoc or not yet mapped (marked as n/a in CSV)
|
||
llnum: 0, // Lock/Spray line (not available)
|
||
timeAdv: 0, // Time advance for GPS & system lag compensation (not available)
|
||
utmX: 0, // UTM X coordinate (would need conversion from lat/lon)
|
||
utmY: 0, // UTM Y coordinate (would need conversion from lat/lon)
|
||
noAC: 0, // Number of aircraft (not available)
|
||
stdHdop: currentGPS?.hdop || 0,
|
||
satsIn: currentGPS?.satellitesUsed || 0, // satellites used -> satsIn
|
||
calcodeFreq: 0, // Calibration code for spray offset (not available)
|
||
sprayHeight: 0, // Spray height from laser altimeter (not available)
|
||
radarAlt: 0, // Radar altitude (not available)
|
||
|
||
// Additional fields that may be used by ApplicationDetail schema
|
||
driftX: 0,
|
||
driftY: 0,
|
||
depositX: 0,
|
||
depositY: 0,
|
||
applicRate: currentAppliedRate?.channels?.[0]?.appliedRate || 0, // Applied rate from Type 36 record
|
||
rpm: [],
|
||
weight: 0,
|
||
// From SatLoc GPS (Type 10) or GPS Status Extended (Type 11)
|
||
gdop: currentGPS?.gdop || 0,
|
||
};
|
||
|
||
return appDetail;
|
||
}
|
||
|
||
/**
|
||
* Get both application and target flow rates with combined prioritization logic
|
||
* Returns all available rate units for flexibility with liquid/solid materials
|
||
* @param {Object} positionRecord - Position record (may be enhanced)
|
||
* @param {Object} currentFlow - Current Flow Monitor record (Type 30)
|
||
* @param {Object} currentFlowSetup - Current Flow Setup record (Type 140)
|
||
* @param {Object} currentAppliedRate - Current Applied Rates record (Type 36)
|
||
* @param {Object} currentTargetRate - Current Target Application Rates record (Type 32)
|
||
* @param {Number} swathWidth - Current Swath width in meters
|
||
* @returns {Object} Object with all rate fields (lminApp, lhaApp, lminReq, lhaReq)
|
||
*/
|
||
getFlowRates(positionRecord, currentFlow, currentFlowSetup, currentAppliedRate, currentTargetRate, swathWidth) {
|
||
const rates = {
|
||
lminApp: 0, // L/min application rate for liquids
|
||
// lhaApp: 0, // L/ha application rate for liquids
|
||
lminReq: 0, // L/min target rate for liquids
|
||
lhaReq: 0, // L/ha target rate for liquids
|
||
};
|
||
|
||
// PRIORITY 1: Enhanced Position record (most accurate, real-time)
|
||
if (positionRecord.isEnhanced) {
|
||
// Application rates from enhanced position
|
||
if (positionRecord.flowRateLmin !== undefined) {
|
||
rates.lminApp = positionRecord.flowRateLmin;
|
||
}
|
||
// if (positionRecord.flowRateLha !== undefined) {
|
||
// rates.lhaApp = positionRecord.flowRateLha;
|
||
// }
|
||
|
||
// Target rates from enhanced position
|
||
if (positionRecord.targetFlowRateLmin !== undefined) {
|
||
rates.lminReq = positionRecord.targetFlowRateLmin;
|
||
}
|
||
if (positionRecord.targetFlowRateLha !== undefined) {
|
||
rates.lhaReq = positionRecord.targetFlowRateLha;
|
||
}
|
||
|
||
// Enhanced position has both app and target data, return it now
|
||
if (positionRecord.flags === 2) { // Boom is ON
|
||
return rates;
|
||
} // Else pass down because there is rates data when boom is OFF
|
||
}
|
||
|
||
// PRIORITY 2: Specific rate records for missing values
|
||
// Target Application Rates (Type 32)
|
||
if (currentTargetRate) {
|
||
// Target rates - Target Application Rates (Type 32) - always in L/min per specification
|
||
if (rates.lminReq === 0 && currentTargetRate?.targetRate !== undefined) {
|
||
rates.lminReq = currentTargetRate.targetRate; // Already in L/min according to specification
|
||
}
|
||
}
|
||
|
||
// Applied Rates (Type 36)
|
||
if (currentAppliedRate?.channels && currentAppliedRate.channels.length > 0) {
|
||
// Use first channel from Applied Rates record
|
||
const firstChannel = currentAppliedRate.channels[0];
|
||
rates.lminApp = firstChannel.appliedRate; // Assuming L/min for now
|
||
}
|
||
|
||
// PRIORITY 3: Flow Setup (Type 140) fallback for any missing values for target rates
|
||
if (currentFlowSetup) {
|
||
if (currentFlowSetup.applicationRate !== undefined) {
|
||
if (rates.lminReq === 0) {
|
||
rates.lminReq = currentFlowSetup.applicationRate;
|
||
}
|
||
}
|
||
if (currentFlowSetup.applicationPerArea !== undefined) {
|
||
if (rates.lhaReq === 0) {
|
||
rates.lhaReq = currentFlowSetup.applicationPerArea;
|
||
}
|
||
}
|
||
}
|
||
|
||
// NOTES: When Controller is ON, if no applied rates found => fallback to Flow Monitor (Type 30) then target rate flow rate
|
||
// This applied for old SatLoc files without enhanced position records from LEGACY (old) systems like Bantam
|
||
if (!positionRecord.isEnhanced && positionRecord.flags === 2 && rates.lminApp === 0) {
|
||
if (currentFlow && currentFlow?.flowRate) {
|
||
rates.lminApp = currentFlow.flowRate; // flowRate is in L/min or Kg/min
|
||
} else {
|
||
rates.lminApp = rates.lminReq; // Use target flow rate
|
||
if (!rates.lminApp && rates.lhaReq && currentFlowSetup && swathWidth) {
|
||
// Fallback: convert per-area rate to per-minute rate using ground speed and swath width
|
||
rates.lminApp = this.convertPerAreaToPerMinute(rates.lhaReq, positionRecord.speed, swathWidth,
|
||
currentFlowSetup.flowControlStatus.dry ? FCTypes.DRY : FCTypes.LIQUID);
|
||
}
|
||
}
|
||
if (!rates.lminReq && rates.lminApp) {
|
||
rates.lminReq = rates.lminApp; // Edgecase: ensure target rate is at least equal to applied rate
|
||
}
|
||
}
|
||
|
||
return rates;
|
||
}
|
||
|
||
/**
|
||
* Convert application rate from per-area to per-minute
|
||
* Supports both liquid (L/ha -> L/min) and solid/dry (Kg/ha -> Kg/min) materials
|
||
*
|
||
* @param {number} ratePerHa - Application rate per hectare (L/ha or Kg/ha)
|
||
* @param {number} groundSpeedMs - Ground speed in meters per second
|
||
* @param {number} swathWidthM - Swath width in meters
|
||
* @param {string} materialType - Material type: FCTypes.LIQUID or FCTypes.DRY
|
||
* @returns {number} Application rate per minute (L/min or Kg/min)
|
||
*/
|
||
convertPerAreaToPerMinute(ratePerHa, groundSpeedMs, swathWidthM, materialType) {
|
||
// Validate inputs
|
||
if (!ratePerHa || ratePerHa <= 0) {
|
||
return 0;
|
||
}
|
||
|
||
if (!groundSpeedMs || groundSpeedMs <= 0) {
|
||
if (this.options.verbose) {
|
||
this.logger.debug('Cannot convert per-area to per-minute rate: missing or invalid ground speed');
|
||
}
|
||
return 0;
|
||
}
|
||
|
||
if (!swathWidthM || swathWidthM <= 0) {
|
||
if (this.options.verbose) {
|
||
this.logger.debug('Cannot convert per-area to per-minute rate: missing or invalid swath width');
|
||
}
|
||
return 0;
|
||
}
|
||
|
||
// Calculate area covered per minute in hectares
|
||
// Formula: area_ha/min = (swath_width_m × ground_speed_m/s × 60_seconds) / 10000_m²/ha
|
||
const areaCoveredPerMinHa = (swathWidthM * groundSpeedMs * 60) / 10000;
|
||
|
||
// Calculate flow rate per minute
|
||
// For liquid: L/min = L/ha × ha/min
|
||
// For dry: Kg/min = Kg/ha × ha/min
|
||
const ratePerMin = ratePerHa * areaCoveredPerMinHa;
|
||
|
||
return ratePerMin;
|
||
}
|
||
|
||
/**
|
||
* Batch insert application details to database
|
||
*/
|
||
async saveApplicationDetails(applicationDetails, options = {}) {
|
||
if (!applicationDetails || applicationDetails.length === 0) {
|
||
return { inserted: 0 };
|
||
}
|
||
|
||
const batchSize = options.batchSize || this.options.batchSize;
|
||
let totalInserted = 0;
|
||
|
||
for (let i = 0; i < applicationDetails.length; i += batchSize) {
|
||
const batch = applicationDetails.slice(i, i + batchSize);
|
||
|
||
try {
|
||
const result = await ApplicationDetail.insertMany(batch, {
|
||
ordered: false,
|
||
lean: true
|
||
});
|
||
totalInserted += result.length;
|
||
|
||
this.logger.info({ batchNumber: Math.floor(i / batchSize) + 1, recordCount: result.length }, `Inserted batch ${Math.floor(i / batchSize) + 1}: ${result.length} records`);
|
||
|
||
} catch (error) {
|
||
this.logger.error({ error: error.message, batchNumber: Math.floor(i / batchSize) + 1 }, `Error inserting batch: ${error.message}`);
|
||
// Continue with next batch on error
|
||
}
|
||
}
|
||
|
||
return { inserted: totalInserted };
|
||
}
|
||
|
||
/**
|
||
* Get parsing statistics
|
||
*/
|
||
getStatistics() {
|
||
return { ...this.statistics };
|
||
}
|
||
}
|
||
|
||
module.exports = { SatLocLogParser, RECORD_TYPES };
|