diff --git a/echopype/convert/ad2cp_fields.yaml b/echopype/convert/ad2cp_fields.yaml new file mode 100644 index 000000000..9fa426c30 --- /dev/null +++ b/echopype/convert/ad2cp_fields.yaml @@ -0,0 +1,1307 @@ +BURST_AVERAGE_VERSION2_DATA_RECORD_FORMAT: + version: + long_name: Field "Version" in the data specification. + units: '' + comment: 'Version number of the Data Record Definition. +Should be 3' + offset_of_data: + long_name: Field "Offset Of Data" in the data specification. + units: '# of bytes' + comment: 'Number of bytes from start of the record to start of +the actual data. + + Data was converted to the specified units.' + serial_number: + long_name: Field "Serial Number" in the data specification. + units: '' + comment: 'Instrument serial number from factory.' + configuration: + long_name: Field "Configuration" in the data specification. + units: '' + comment: 'Record Configuration Bit Mask' + year: + long_name: Field "Year" in the data specification. + units: 'year' + comment: 'Is given as years from 1900.' + month: + long_name: Field "Month" in the data specification. + units: 'month' + comment: 'January is 0.' + day: + long_name: Field "Day" in the data specification. + units: 'day' + comment: '' + hour: + long_name: Field "Hour" in the data specification. + units: 'hour' + comment: '' + minute: + long_name: Field "Minute" in the data specification. + units: 'minute' + comment: '' + seconds: + long_name: Field "Seconds" in the data specification. + units: 's' + comment: '' + microsec100: + long_name: Field "Microsec100" in the data specification. + units: 'μs' + comment: 'Remaining micro seconds (Date object has +milliseconds resolution)' + speed_of_sound: + long_name: Field "Speed Of Sound" in the data specification. + units: m/s + comment: 'Speed of sound used by the instrument. +Raw data given as 0.1m/s. + + Data was converted to the specified units.' + temperature: + long_name: Field "Temperature" in the data specification. 
+ units: degrees Celsius + comment: 'Reading from the temperature sensor. +Raw data given as 0.01 °C. + + Data was converted to the specified units.' + pressure: + long_name: Field "Pressure" in the data specification. + units: dBar + comment: 'Raw data given as 0.001 dBar. + + Data was converted to the specified units.' + heading: + long_name: Field "Heading" in the data specification. + units: degrees + comment: 'Raw data given as 0.01 degrees. + + Data was converted to the specified units.' + pitch: + long_name: Field "Pitch" in the data specification. + units: degrees + comment: 'Raw data given as 0.01 degrees. + + Data was converted to the specified units.' + roll: + long_name: Field "Roll" in the data specification. + units: degrees + comment: 'Raw data given as 0.01 degrees. + + Data was converted to the specified units.' + error: + long_name: Field "Error" in the data specification. + units: '' + comment: 'Error bit mask' + status: + long_name: Field "Status" in the data specification. + units: '' + comment: ' +Bit 31-28: Wakeup State. +1111 0000 0000 0000 0000 0000 0000 0000. +00 = bad power +01 = power applied +10 = break +11 = RTC alarm + +Bit 27-25: Orientation. +0000 1110 0000 0000 0000 0000 0000 0000. +0: "XUP" Instrument x-axis defined up, heading +reference axis is Z positive +1: "XDOWN" Instrument x-axis defined down, +heading reference axis is Z positive +2: "YUP" Instrument y-axis defined up, heading +reference axis is Z positive +3: "YDOWN" Instrument y-axis defined down, +heading reference axis is Z positive +4: "ZUP" Instrument z-axis defined up, heading +reference axis is X positive +5: "ZDOWN" Instrument z-axis defined down, +heading reference axis is X positive +7: "AHRS" AHRS reports orientation any way it +points. Example: Z down -> Roll = 180 deg. + +Bit 24-22: autoOrientation. +0000 0001 1100 0000 0000 0000 0000 0000. 
+0: "Fixed" Fixed orientation +1: "Auto" Auto Up Down +3: "AHRS3D" AHRS3D' + num_beams_and_coordinate_system_and_num_cells: + long_name: Field "Beams, Coordinates And Cells" in the data specification. + units: '' + comment: 'Bit 11-10 (2 bits): Coordinate system. +b00:ENU, b01:XYZ, b10:BEAM + +Bit 15–12 (4 bits): Number of Beams (NB). +Active beams represented as a 4 charstring of 1s +and 0s. + +Bit 9-0 (10 bits): Number of Cells (NC).' + cell_size: + long_name: Field "Cell Size" in the data specification. + units: m + comment: 'Size of each cell (resolution) on the beam. +Raw data given as mm. + + Data was converted to the specified units.' + blanking: + long_name: Field "Blanking" in the data specification. + units: m + comment: 'Distance from instrument to first data point on the +beam. +Raw data given as mm. + + Data was converted to the specified units.' + velocity_range: + long_name: Field "Velocity Range" in the data specification. + units: m/s + comment: ' + + Data was converted to the specified units.' + battery_voltage: + long_name: Field "Battery Voltage" in the data specification. + units: V + comment: 'Raw value given in 0.1 Volt. + + Data was converted to the specified units.' + magnetometer_raw: + long_name: Field "Magnetometer Raw" in the data specification. + units: '' + comment: 'flux raw value in last measurement interval' + accelerometer_raw_x_axis: + long_name: Field "Accelerometer Raw X Axis" in the data specification. + units: '' + comment: 'raw X axis value in last measurement interval' + accelerometer_raw_y_axis: + long_name: Field "Accelerometer Raw Y Axis" in the data specification. + units: '' + comment: 'raw Y axis value in last measurement interval' + accelerometer_raw_z_axis: + long_name: Field "Accelerometer Raw Z Axis" in the data specification. + units: '' + comment: 'raw Z axis value in last measurement interval' + ambiguity_velocity: + long_name: Field "Ambiguity Velocity" in the data specification. 
+ units: m/s
+ comment: 'Ambiguity velocity, corrected for sound velocity,
+scaled according to Velocity scaling.
+Data given as 10^(Velocity scaling) m/s.
+
+ Data was converted to the specified units.'
+ dataset_description:
+ long_name: Field "Dataset Description" in the data specification.
+ units: ''
+ comment: 'Data set description.
+0-2 Physical beam used for 1st data set.
+3-5 Physical beam used for 2nd data set.
+6-8 Physical beam used for 3rd data set.
+9-11 Physical beam used for 4th data set.
+12-14 Physical beam used for 5th data set.'
+ transmit_energy:
+ long_name: Field "Transmit Energy" in the data specification.
+ units: ''
+ comment: 'Transmitted energy.'
+ velocity_scaling:
+ long_name: Field "Velocity Scaling" in the data specification.
+ units: ''
+ comment: 'Velocity scaling used to scale velocity data.'
+ power_level:
+ long_name: Field "Power Level" in the data specification.
+ units: dB
+ comment: 'Configured power level.
+
+ Data was converted to the specified units.'
+ velocity_data_burst:
+ long_name: Field "Velocity Data Burst" in the data specification.
+ units: m/s
+ comment: 'This field exists if the Velocity data included bit of
+the Config byte is set.
+Data given as 10^(Velocity Scaling).
+
+ Data was converted to the specified units.'
+ velocity_data_average:
+ long_name: Field "Velocity Data Average" in the data specification.
+ units: m/s
+ comment: 'This field exists if the Velocity data included bit of
+the Config byte is set.
+Data given as 10^(Velocity Scaling).
+
+ Data was converted to the specified units.'
+ velocity_data_echosounder:
+ long_name: Field "Velocity Data Echosounder" in the data specification.
+ units: m/s
+ comment: 'This field exists if the Velocity data included bit of
+the Config byte is set.
+Data given as 10^(Velocity Scaling).
+
+ Data was converted to the specified units.'
+ amplitude_data_burst:
+ long_name: Field "Amplitude Data Burst" in the data specification.
+ units: dB/count + comment: 'This field exists if the amplitude data included bit of +the Config byte is set. +Data given as 0.5 dB/count. + + Data was converted to the specified units.' + amplitude_data_average: + long_name: Field "Amplitude Data Average" in the data specification. + units: dB/count + comment: 'This field exists if the amplitude data included bit of +the Config byte is set. +Data given as 0.5 dB/count. + + Data was converted to the specified units.' + amplitude_data_echosounder: + long_name: Field "Amplitude Data Echosounder" in the data specification. + units: dB/count + comment: 'This field exists if the amplitude data included bit of +the Config byte is set. +Data given as 0.5 dB/count. + + Data was converted to the specified units.' + correlation_data_burst: + long_name: Field "Correlation Data Burst" in the data specification. + units: 0-100% + comment: 'This field exists if the Correlation data included bit +of the Config byte is set. +[0 – 100 %] + + Data was converted to the specified units.' + correlation_data_average: + long_name: Field "Correlation Data Average" in the data specification. + units: 0-100% + comment: 'This field exists if the Correlation data included bit +of the Config byte is set. +[0 – 100 %] + + Data was converted to the specified units.' + correlation_data_echosounder: + long_name: Field "Correlation Data Echosounder" in the data specification. + units: 0-100% + comment: 'This field exists if the Correlation data included bit +of the Config byte is set. +[0 – 100 %] + + Data was converted to the specified units.' +BURST_AVERAGE_VERSION3_DATA_RECORD_FORMAT: + version: + long_name: Field "Version" in the data specification. + units: '' + comment: 'Version number of the Data Record Definition. +Should be 3' + offset_of_data: + long_name: Field "Offset Of Data" in the data specification. + units: '# of bytes' + comment: 'Number of bytes from start of the record to start of +the actual data. 
+ + Data was converted to the specified units.' + configuration: + long_name: Field "Configuration" in the data specification. + units: '' + comment: 'Record Configuration Bit Mask' + serial_number: + long_name: Field "Serial Number" in the data specification. + units: '' + comment: 'Instrument serial number from factory.' + year: + long_name: Field "Year" in the data specification. + units: 'year' + comment: 'Is given as years from 1900.' + month: + long_name: Field "Month" in the data specification. + units: 'month' + comment: 'January is 0.' + day: + long_name: Field "Day" in the data specification. + units: 'day' + comment: '' + hour: + long_name: Field "Hour" in the data specification. + units: 'hour' + comment: '' + minute: + long_name: Field "Minute" in the data specification. + units: 'minute' + comment: '' + seconds: + long_name: Field "Seconds" in the data specification. + units: 's' + comment: '' + microsec100: + long_name: Field "Microsec100" in the data specification. + units: 'μs' + comment: 'Remaining micro seconds (Date object has +milliseconds resolution)' + speed_of_sound: + long_name: Field "Speed Of Sound" in the data specification. + units: m/s + comment: 'Speed of sound used by the instrument. +Raw data given as 0.1m/s. + + Data was converted to the specified units.' + temperature: + long_name: Field "Temperature" in the data specification. + units: degrees Celsius + comment: 'Reading from the temperature sensor. +Raw data given as 0.01 °C. + + Data was converted to the specified units.' + pressure: + long_name: Field "Pressure" in the data specification. + units: dBar + comment: 'Raw data given as 0.001 dBar. + + Data was converted to the specified units.' + heading: + long_name: Field "Heading" in the data specification. + units: degrees + comment: 'Raw data given as 0.01 degrees. + + Data was converted to the specified units.' + pitch: + long_name: Field "Pitch" in the data specification. 
+ units: degrees + comment: 'Raw data given as 0.01 degrees. + + Data was converted to the specified units.' + roll: + long_name: Field "Roll" in the data specification. + units: degrees + comment: 'Raw data given as 0.01 degrees. + + Data was converted to the specified units.' + num_beams_and_coordinate_system_and_num_cells: + long_name: Field "Beams, Coordinates And Cells" in the data specification. + units: '' + comment: 'Bit 11-10 (2 bits): Coordinate system. +b00:ENU, b01:XYZ, b10:BEAM + +Bit 15–12 (4 bits): Number of Beams (NB). +Active beams represented as a 4 charstring of 1s +and 0s. + +Bit 9-0 (10 bits): Number of Cells (NC).' + cell_size: + long_name: Field "Cell Size" in the data specification. + units: m + comment: 'Size of each cell (resolution) on the beam. +Raw data given as mm. + + Data was converted to the specified units.' + blanking: + long_name: Field "Blanking" in the data specification. + units: m + comment: 'Distance from instrument to first data point on the +beam. +Raw data given as mm. + + Data was converted to the specified units.' + nominal_correlation: + long_name: Field "Nominal Correlation" in the data specification. + units: '%' + comment: 'The nominal correlation for the configured +combination of cell size and velocity range. + + Data was converted to the specified units.' + temperature_from_pressure_sensor: + long_name: Field "Temperature From Pressure Sensor" in the data specification. + units: degrees Celsius + comment: ' + + Data was converted to the specified units.' + battery_voltage: + long_name: Field "Battery Voltage" in the data specification. + units: V + comment: 'Raw value given in 0.1 Volt. + + Data was converted to the specified units.' + magnetometer_raw: + long_name: Field "Magnetometer Raw" in the data specification. + units: '' + comment: 'flux raw value in last measurement interval' + accelerometer_raw_x_axis: + long_name: Field "Accelerometer Raw X Axis" in the data specification. 
+ units: '' + comment: 'raw X axis value in last measurement interval' + accelerometer_raw_y_axis: + long_name: Field "Accelerometer Raw Y Axis" in the data specification. + units: '' + comment: 'raw Y axis value in last measurement interval' + accelerometer_raw_z_axis: + long_name: Field "Accelerometer Raw Z Axis" in the data specification. + units: '' + comment: 'raw Z axis value in last measurement interval' + ambiguity_velocity_or_echosounder_frequency: + long_name: Field "Ambiguity Velocity Or Echosounder Frequency" in the data specification. + units: '' + comment: 'Ambiguity velocity, corrected for sound velocity, +scaled according to Velocity scaling. +10^(Velocity scaling) m/s OR Echosounder frequency [Hz] + ' + dataset_description: + long_name: Field "Dataset Description" in the data specification. + units: '' + comment: 'Data set description. +0-3 Physical beam used for 1st data set. +4-7 Physical beam used for 2nd data set. +8-11 Physical beam used for 3th data set. +12-16 Physical beam used for 4th data set. +This was parsed and used to set beam coordinate' + transmit_energy: + long_name: Field "Transmit Energy" in the data specification. + units: '' + comment: 'Transmitted energy.' + velocity_scaling: + long_name: Field "Velocity Scaling" in the data specification. + units: '' + comment: 'Velocity scaling used to scale velocity data.' + power_level: + long_name: Field "Power Level" in the data specification. + units: dB + comment: 'Configured power level. + + Data was converted to the specified units.' + magnetometer_temperature: + long_name: Field "Magnetometer Temperature" in the data specification. + units: degrees Celsius + comment: 'Magnetometer temperature reading. +Uncalibrated +Raw data in 1/1000 °C' + real_time_clock_temperature: + long_name: Field "Real Time Clock Temperature" in the data specification. + units: degrees Celsius + comment: 'Real Time Clock temperature reading. + + Data was converted to the specified units.' 
+ error: + long_name: Field "Error" in the data specification. + units: '' + comment: 'Error bit mask' + status0: + long_name: Field "Status0" in the data specification. + units: '' + comment: 'Extended status bit mask. + +0 bit: Indicates that the processor Idles less than 3 percent +1 bit: Indicates that the processor idles less than 6 percent +2 bit: Indicates that the processor idles less than 12 percent +15 bit: If this bit is set the rest of the word/ extended +status should be interpreted +' # 2 bit is also 1 bit in the spec but should be 2 bit + status: + long_name: Field "Status" in the data specification. + units: '' + comment: ' +Bit 31-28: Wakeup State. +1111 0000 0000 0000 0000 0000 0000 0000. +00 = bad power +01 = power applied +10 = break +11 = RTC alarm + +Bit 27-25: Orientation. +0000 1110 0000 0000 0000 0000 0000 0000. +0: "XUP" Instrument x-axis defined up, heading +reference axis is Z positive +1: "XDOWN" Instrument x-axis defined down, +heading reference axis is Z positive +2: "YUP" Instrument y-axis defined up, heading +reference axis is Z positive +3: "YDOWN" Instrument y-axis defined down, +heading reference axis is Z positive +4: "ZUP" Instrument z-axis defined up, heading +reference axis is X positive +5: "ZDOWN" Instrument z-axis defined down, +heading reference axis is X positive +7: "AHRS" AHRS reports orientation any way it +points. Example: Z down -> Roll = 180 deg. + +Bit 24-22: autoOrientation. +0000 0001 1100 0000 0000 0000 0000 0000. +0: "Fixed" Fixed orientation +1: "Auto" Auto Up Down +3: "AHRS3D" AHRS3D' + ensemble_counter: + long_name: Field "Ensemble Counter" in the data specification. + units: '' + comment: 'Counts the number of ensembles in both averaged +and burst data' + velocity_data_burst: + long_name: Field "Velocity Data Burst" in the data specification. + units: m/s + comment: 'This field exists if the Velocity data included bit of +the Config byte is set. +Data given as 10^(Velocity Scaling). 
+
+ Data was converted to the specified units.'
+ velocity_data_average:
+ long_name: Field "Velocity Data Average" in the data specification.
+ units: m/s
+ comment: 'This field exists if the Velocity data included bit of
+the Config byte is set.
+Data given as 10^(Velocity Scaling).
+
+ Data was converted to the specified units.'
+ velocity_data_echosounder:
+ long_name: Field "Velocity Data Echosounder" in the data specification.
+ units: m/s
+ comment: 'This field exists if the Velocity data included bit of
+the Config byte is set.
+Data given as 10^(Velocity Scaling).
+
+ Data was converted to the specified units.'
+ amplitude_data_burst:
+ long_name: Field "Amplitude Data Burst" in the data specification.
+ units: dB/count
+ comment: 'This field exists if the amplitude data included bit of
+the Config byte is set.
+Data given as 0.5 dB/count.
+
+ Data was converted to the specified units.'
+ amplitude_data_average:
+ long_name: Field "Amplitude Data Average" in the data specification.
+ units: dB/count
+ comment: 'This field exists if the amplitude data included bit of
+the Config byte is set.
+Data given as 0.5 dB/count.
+
+ Data was converted to the specified units.'
+ amplitude_data_echosounder:
+ long_name: Field "Amplitude Data Echosounder" in the data specification.
+ units: dB/count
+ comment: 'This field exists if the amplitude data included bit of
+the Config byte is set.
+Data given as 0.5 dB/count.
+
+ Data was converted to the specified units.'
+ correlation_data_burst:
+ long_name: Field "Correlation Data Burst" in the data specification.
+ units: 0-100%
+ comment: 'This field exists if the Correlation data included bit
+of the Config byte is set.
+Data given as [0 – 100 %].
+
+ Data was converted to the specified units.'
+ correlation_data_average:
+ long_name: Field "Correlation Data Average" in the data specification.
+ units: 0-100%
+ comment: 'This field exists if the Correlation data included bit
+of the Config byte is set.
+Data given as [0 – 100 %]. + + Data was converted to the specified units.' + correlation_data_echosounder: + long_name: Field "Correlation Data Echosounder" in the data specification. + units: 0-100% + comment: 'This field exists if the Correlation data included bit +of the Config byte is set. +Data given as [0 – 100 %]. + + Data was converted to the specified units.' + altimeter_distance: + long_name: Field "Altimeter Distance" in the data specification. + units: m + comment: 'Distance to surface from Leading Edge algorithm. + + Data was converted to the specified units.' + altimeter_quality: + long_name: Field "Altimeter Quality" in the data specification. + units: '' + comment: 'Result of LE algorithm. When quality is deemed too +low according to instrument specific limits. Distance +and Quality set to 0.' + ast_distance: + long_name: Field "Ast Distance" in the data specification. + units: m + comment: 'Distance to surface from Max Peak algorithm. + + Data was converted to the specified units.' + ast_quality: + long_name: Field "Ast Quality" in the data specification. + units: dB + comment: 'Amplitude at which surface is detected +Raw data in steps of 0.01 dB, i.e. quality of 8000 = 80 +dB. + + Data was converted to the specified units.' + ast_offset_100us: + long_name: Field "Ast Offset 100us" in the data specification. + units: "100 μs" + comment: 'Offset in step of measurement to velocity +measurement. +Raw data given in 100 µs. + + Data was converted to the specified units.' + ast_pressure: + long_name: Field "Ast Pressure" in the data specification. + units: dBar + comment: 'Pressure value measured during the AST/altimeter +ping. + + Data was converted to the specified units.' + altimeter_spare: + long_name: Field "Altimeter Spare" in the data specification. + units: '' + comment: '' + altimeter_raw_data_num_samples: + long_name: Field "Altimeter Raw Data Num Samples" in the data specification. 
+ units: ''
+ comment: 'Altimeter Raw Data – Number of Samples'
+ altimeter_raw_data_sample_distance:
+ long_name: Field "Altimeter Raw Data Sample Distance" in the data specification.
+ units: m
+ comment: 'Distance between samples.
+Raw data given in 0.1mm.
+
+ Data was converted to the specified units.'
+ altimeter_raw_data_samples:
+ long_name: Field "Altimeter Raw Data Samples" in the data specification.
+ units: ''
+ comment: 'Altimeter Raw Data – Samples
+Raw data given as 16 bits Signed fract'
+ echosounder_data:
+ long_name: Field "Echosounder Data" in the data specification.
+ units: dB/count
+ comment: 'Echosounder amplitude Data.
+Raw data given as 0.01 dB/count.
+
+ Data was converted to the specified units.'
+ ahrs_rotation_matrix:
+ long_name: Field "AHRS Rotation Matrix" in the data specification.
+ units: ''
+ comment: 'AHRS Rotation Matrix [3x3]'
+ ahrs_quaternions:
+ long_name: Field "AHRS Quaternions" in the data specification.
+ units: ''
+ comment: 'quaternions'
+ ahrs_gyro:
+ long_name: Field "AHRS Gyro" in the data specification.
+ units: 'dps'
+ comment: 'Gyro in X direction in degrees per second'
+ percentage_good_data:
+ long_name: Field "Percentage Good Data" in the data specification.
+ units: '%'
+ comment: 'Percent Good Estimate per cell.
+These fields exist if the Percentage Good data
+included.
+
+ Data was converted to the specified units.'
+ std_dev_pitch:
+ long_name: Field "Std Dev Pitch" in the data specification.
+ units: degrees
+ comment: 'Standard deviation on pitch data.
+Raw data in 0.01 degrees.
+
+ Data was converted to the specified units.'
+ std_dev_roll:
+ long_name: Field "Std Dev Roll" in the data specification.
+ units: degrees
+ comment: 'Standard deviation on roll data.
+Raw data in 0.01 degrees.
+
+ Data was converted to the specified units.'
+ std_dev_heading:
+ long_name: Field "Std Dev Heading" in the data specification.
+ units: degrees
+ comment: 'Standard deviation on heading data.
+Raw data in 0.01 degrees. + + Data was converted to the specified units.' + std_dev_pressure: + long_name: Field "Std Dev Pressure" in the data specification. + units: dBar + comment: 'Standard deviation on pressure data. +Raw data in 0.001 Bar. + + Data was converted to the specified units.' +BOTTOM_TRACK_DATA_RECORD_FORMAT: + version: + long_name: Field "Version" in the data specification. + units: '' + comment: 'Version number of the Data Record Definition. +Should be 3' + offset_of_data: + long_name: Field "Offset Of Data" in the data specification. + units: '# of bytes' + comment: 'Number of bytes from start of the record to start of +the actual data. + + Data was converted to the specified units.' + configuration: + long_name: Field "Configuration" in the data specification. + units: '' + comment: 'Record Configuration Bit Mask' + serial_number: + long_name: Field "Serial Number" in the data specification. + units: '' + comment: 'Instrument serial number from factory.' + year: + long_name: Field "Year" in the data specification. + units: 'year' + comment: 'Is given as years from 1900.' + month: + long_name: Field "Month" in the data specification. + units: 'month' + comment: 'January is 0.' + day: + long_name: Field "Day" in the data specification. + units: 'day' + comment: '' + hour: + long_name: Field "Hour" in the data specification. + units: 'hour' + comment: '' + minute: + long_name: Field "Minute" in the data specification. + units: 'minute' + comment: '' + seconds: + long_name: Field "Seconds" in the data specification. + units: 's' + comment: '' + microsec100: + long_name: Field "Microsec100" in the data specification. + units: 'μs' + comment: 'Remaining micro seconds (Date object has +milliseconds resolution)' + speed_of_sound: + long_name: Field "Speed Of Sound" in the data specification. + units: m/s + comment: 'Speed of sound used by the instrument. +Raw data given as 0.1m/s. + + Data was converted to the specified units.' 
+ temperature: + long_name: Field "Temperature" in the data specification. + units: degrees Celsius + comment: 'Reading from the temperature sensor. +Raw data given as 0.01 °C. + + Data was converted to the specified units.' + pressure: + long_name: Field "Pressure" in the data specification. + units: dBar + comment: 'Raw data given as 0.001 dBar. + + Data was converted to the specified units.' + heading: + long_name: Field "Heading" in the data specification. + units: degrees + comment: 'Raw data given as 0.01 degrees. + + Data was converted to the specified units.' + pitch: + long_name: Field "Pitch" in the data specification. + units: degrees + comment: 'Raw data given as 0.01 degrees. + + Data was converted to the specified units.' + roll: + long_name: Field "Roll" in the data specification. + units: degrees + comment: 'Raw data given as 0.01 degrees. + + Data was converted to the specified units.' + num_beams_and_coordinate_system_and_num_cells: + long_name: Field "Beams, Coordinates And Cells" in the data specification. + units: '' + comment: 'Bit 11-10 (2 bits) describes the coordinate system +used. +b00:ENU, b01:XYZ, b10:BEAM + +Bit 15–12 (4 bits) represent the number of beams (NB). +Active beams represented as a 4 charstring of 1s and 0s. + +Bit 9-0 (10 bits) represent the number of cells (NC).' + cell_size: + long_name: Field "Cell Size" in the data specification. + units: m + comment: 'Size of each cell (resolution) on the beam. +Raw data given as mm. + + Data was converted to the specified units.' + blanking: + long_name: Field "Blanking" in the data specification. + units: m + comment: 'Distance from instrument to first data point on the +beam. +Raw data given as mm. + + Data was converted to the specified units.' + nominal_correlation: + long_name: Field "Nominal Correlation" in the data specification. + units: '%' + comment: 'The nominal correlation for the configured +combination of cell size and velocity range. 
+
+ Data was converted to the specified units.'
+ battery_voltage:
+ long_name: Field "Battery Voltage" in the data specification.
+ units: V
+ comment: 'Raw value given in 0.1 Volt.
+
+ Data was converted to the specified units.'
+ magnetometer_raw:
+ long_name: Field "Magnetometer Raw" in the data specification.
+ units: ''
+ comment: 'flux raw value in last measurement interval'
+ accelerometer_raw_x_axis:
+ long_name: Field "Accelerometer Raw X Axis" in the data specification.
+ units: ''
+ comment: 'raw X axis value in last measurement interval'
+ accelerometer_raw_y_axis:
+ long_name: Field "Accelerometer Raw Y Axis" in the data specification.
+ units: ''
+ comment: 'raw Y axis value in last measurement interval'
+ accelerometer_raw_z_axis:
+ long_name: Field "Accelerometer Raw Z Axis" in the data specification.
+ units: ''
+ comment: 'raw Z axis value in last measurement interval'
+ ambiguity_velocity:
+ long_name: Field "Ambiguity Velocity" in the data specification.
+ units: m/s
+ comment: 'Ambiguity velocity, corrected for sound velocity,
+scaled according to Velocity scaling.
+Data given as 10^(Velocity scaling) m/s.
+
+ Data was converted to the specified units.'
+ dataset_description:
+ long_name: Field "Dataset Description" in the data specification.
+ units: ''
+ comment: 'Data set description.
+0-3 Physical beam used for 1st data set.
+4-7 Physical beam used for 2nd data set.
+8-11 Physical beam used for 3rd data set.
+12-15 Physical beam used for 4th data set'
+ transmit_energy:
+ long_name: Field "Transmit Energy" in the data specification.
+ units: ''
+ comment: 'Transmitted energy.'
+ velocity_scaling:
+ long_name: Field "Velocity Scaling" in the data specification.
+ units: ''
+ comment: 'Velocity scaling used to scale velocity data.'
+ power_level:
+ long_name: Field "Power Level" in the data specification.
+ units: dB
+ comment: 'Configured power level.
+
+ Data was converted to the specified units.'
+ magnetometer_temperature: + long_name: Field "Magnetometer Temperature" in the data specification. + units: degrees Celsius + comment: 'Magnetometer temperature reading. +Uncalibrated +Raw data in 1/1000 °C + + ' + real_time_clock_temperature: + long_name: Field "Real Time Clock Temperature" in the data specification. + units: degrees Celsius + comment: 'Real Time Clock temperature reading. + + Data was converted to the specified units.' + error: + long_name: Field "Error" in the data specification. + units: '' + comment: 'Error bit mask' + status: + long_name: Field "Status" in the data specification. + units: '' + comment: ' +Bit 31-28: Wakeup State. +1111 0000 0000 0000 0000 0000 0000 0000. +00 = bad power +01 = power applied +10 = break +11 = RTC alarm + +Bit 27-25: Orientation. +0000 1110 0000 0000 0000 0000 0000 0000. +0: "XUP" Instrument x-axis defined up, heading +reference axis is Z positive +1: "XDOWN" Instrument x-axis defined down, +heading reference axis is Z positive +2: "YUP" Instrument y-axis defined up, heading +reference axis is Z positive +3: "YDOWN" Instrument y-axis defined down, +heading reference axis is Z positive +4: "ZUP" Instrument z-axis defined up, heading +reference axis is X positive +5: "ZDOWN" Instrument z-axis defined down, +heading reference axis is X positive +7: "AHRS" AHRS reports orientation any way it +points. Example: Z down -> Roll = 180 deg. + +Bit 24-22: autoOrientation. +0000 0001 1100 0000 0000 0000 0000 0000. +0: "Fixed" Fixed orientation +1: "Auto" Auto Up Down +3: "AHRS3D" AHRS3D' + ensemble_counter: + long_name: Field "Ensemble Counter" in the data specification. + units: '' + comment: 'Counts the number of ensembles in both averaged +and burst data' + velocity_data: + long_name: Field "Velocity Data" in the data specification. + units: m/s + comment: 'This field exists if the Velocity data included bit of +the Config byte is set. +Data scaled as product of 10^(Velocity Scaling). 
+
+ Data was converted to the specified units.'
+ distance_data:
+ long_name: Field "Distance Data" in the data specification.
+ units: ''
+ comment: 'This field exists if the Distance data included bit of
+the Config byte is set.'
+ figure_of_merit_data:
+ long_name: Field "Figure Of Merit Data" in the data specification.
+ units: ''
+ comment: 'This field exists if the hasFigureOfMeritData bit of
+the Config byte is set.'
+ECHOSOUNDER_RAW_DATA_RECORD_FORMAT:
+ version:
+ long_name: Field "Version" in the data specification.
+ units: ''
+ comment: 'Version number of the Data Record Definition.
+Should be 3'
+ offset_of_data:
+ long_name: Field "Offset Of Data" in the data specification.
+ units: '# of bytes'
+ comment: 'Number of bytes from start of the record to start of
+the actual data.
+
+ Data was converted to the specified units.'
+ year:
+ long_name: Field "Year" in the data specification.
+ units: 'year'
+ comment: 'Is given as years from 1900.'
+ month:
+ long_name: Field "Month" in the data specification.
+ units: 'month'
+ comment: 'January is 0.'
+ day:
+ long_name: Field "Day" in the data specification.
+ units: 'day'
+ comment: ''
+ hour:
+ long_name: Field "Hour" in the data specification.
+ units: 'hour'
+ comment: ''
+ minute:
+ long_name: Field "Minute" in the data specification.
+ units: 'minute'
+ comment: ''
+ seconds:
+ long_name: Field "Seconds" in the data specification.
+ units: 's'
+ comment: ''
+ microsec100:
+ long_name: Field "Microsec100" in the data specification.
+ units: 'μs'
+ comment: 'Remaining micro seconds (Date object has
+milliseconds resolution)'
+ error:
+ long_name: Field "Error" in the data specification.
+ units: ''
+ comment: 'Error bit mask'
+ status:
+ long_name: Field "Status" in the data specification.
+ units: ''
+ comment: '
+Bit 31-28: Wakeup State.
+1111 0000 0000 0000 0000 0000 0000 0000.
+00 = bad power
+01 = power applied
+10 = break
+11 = RTC alarm
+
+Bit 27-25: Orientation.
+0000 1110 0000 0000 0000 0000 0000 0000. +0: "XUP" Instrument x-axis defined up, heading +reference axis is Z positive +1: "XDOWN" Instrument x-axis defined down, +heading reference axis is Z positive +2: "YUP" Instrument y-axis defined up, heading +reference axis is Z positive +3: "YDOWN" Instrument y-axis defined down, +heading reference axis is Z positive +4: "ZUP" Instrument z-axis defined up, heading +reference axis is X positive +5: "ZDOWN" Instrument z-axis defined down, +heading reference axis is X positive +7: "AHRS" AHRS reports orientation any way it +points. Example: Z down -> Roll = 180 deg. + +Bit 24-22: autoOrientation. +0000 0001 1100 0000 0000 0000 0000 0000. +0: "Fixed" Fixed orientation +1: "Auto" Auto Up Down +3: "AHRS3D" AHRS3D' + serial_number: + long_name: Field "Serial Number" in the data specification. + units: '' + comment: 'Instrument serial number from factory.' + num_complex_samples: + long_name: Field "Num Complex Samples" in the data specification. + units: '' + comment: 'Number of following Complex Samples' + ind_start_samples: + long_name: Field "Ind Start Samples" in the data specification. + units: '' + comment: 'Sample number where the position is equal to the +configured blanking distance.' + freq_raw_sample_data: + long_name: Field "Freq Raw Sample Data" in the data specification. + units: 'Hz' + comment: 'Sample Rate' + echosounder_raw_samples: + long_name: Field "Echosounder Raw Samples" in the data specification. + units: '' + comment: '' + echosounder_raw_samples_i: + long_name: Field "Echosounder Raw Samples I" in the data specification. + units: '' + comment: '' + echosounder_raw_samples_q: + long_name: Field "Echosounder Raw Samples Q" in the data specification. + units: '' + comment: '' + echosounder_raw_transmit_samples: + long_name: Field "Echosounder Raw Transmit Samples" in the data specification. 
+ units: '' + comment: '' + echosounder_raw_transmit_samples_i: + long_name: Field "Echosounder Raw Transmit Samples I" in the data specification. + units: '' + comment: '' + echosounder_raw_transmit_samples_q: + long_name: Field "Echosounder Raw Transmit Samples Q" in the data specification. + units: '' + comment: '' +POSTPROCESSED: + pressure_sensor_valid: + long_name: Field "Pressure Sensor Valid" parsed from field "Configuration" + units: '' + comment: Derived from field "Configuration" + temperature_sensor_valid: + long_name: Field "Temperature Sensor Valid" parsed from field "Configuration" + units: '' + comment: Derived from field "Configuration" + compass_sensor_valid: + long_name: Field "Compass Sensor Valid" parsed from field "Configuration" + units: '' + comment: Derived from field "Configuration" + tilt_sensor_valid: + long_name: Field "Tilt Sensor Valid" parsed from field "Configuration" + units: '' + comment: Derived from field "Configuration" + velocity_data_included: + long_name: Field "Velocity Data Included" parsed from field "Configuration" + units: '' + comment: Derived from field "Configuration" + amplitude_data_included: + long_name: Field "Amplitude Data Included" parsed from field "Configuration" + units: '' + comment: Derived from field "Configuration" + correlation_data_included: + long_name: Field "Correlation Data Included" parsed from field "Configuration" + units: '' + comment: Derived from field "Configuration" + altimeter_data_included: + long_name: Field "Altimeter Data Included" parsed from field "Configuration" + units: '' + comment: Derived from field "Configuration" + altimeter_raw_data_included: + long_name: Field "Altimeter Raw Data Included" parsed from field "Configuration" + units: '' + comment: Derived from field "Configuration" + ast_data_included: + long_name: Field "Ast Data Included" parsed from field "Configuration" + units: '' + comment: Derived from field "Configuration" + echosounder_data_included: + long_name: 
Field "Echosounder Data Included" parsed from field "Configuration" + units: '' + comment: Derived from field "Configuration" + ahrs_data_included: + long_name: Field "Ahrs Data Included" parsed from field "Configuration" + units: '' + comment: Derived from field "Configuration" + percentage_good_data_included: + long_name: Field "Percentage Good Data Included" parsed from field "Configuration" + units: '' + comment: Derived from field "Configuration" + std_dev_data_included: + long_name: Field "Std Dev Data Included" parsed from field "Configuration" + units: '' + comment: Derived from field "Configuration" + distance_data_included: + long_name: Field "Distance Data Included" parsed from field "Configuration" + units: '' + comment: Derived from field "Configuration" + figure_of_merit_data_included: + long_name: Field "Figure Of Merit Data Included" parsed from field "Configuration" + units: '' + comment: Derived from field "Configuration" + num_cells: + long_name: Field "Number Of Cells" parsed from field "Beams, Coordinates And Cells" + units: '' + comment: Derived from field "Beams, Coordinates And Cells" + num_echosounder_cells: + long_name: Field "Number Of Echosounder Cells" parsed from field "Beams, Coordinates And Cells" + units: '' + comment: Derived from field "Beams, Coordinates And Cells" + coordinate_system: + long_name: Field "Coordinate System" parsed from field "Beams, Coordinates And Cells" + units: '' + comment: Derived from field "Beams, Coordinates And Cells" + num_beams: + long_name: Field "Number Of Beams" parsed from field "Beams, Coordinates And Cells" + units: '' + comment: Derived from field "Beams, Coordinates And Cells" + echosounder_frequency: + long_name: Field "Echosounder Frequency" parsed from field "Ambiguity Velocity Or Echosounder Frequency" + units: Hz + comment: Derived from field "Ambiguity Velocity Or Echosounder Frequency" + ambiguity_velocity: + long_name: Field "Ambiguity Velocity" parsed from field "Ambiguity Velocity Or 
Echosounder Frequency" + units: m/s + comment: 'Derived from field "Ambiguity Velocity Or Echosounder Frequency." + + Ambiguity velocity, corrected for sound velocity, +scaled according to Velocity scaling. +Data given as 10^(Velocity scaling) m/s. + + Data was converted to the specified units.' + procidle3: + long_name: Field "Processor Idles <3%" parsed from field "Status0" + units: '' + comment: 'Derived from field "Status0". + +Indicates that the processor Idles less than 3 +percent' + procidle6: + long_name: Field "Processor Idles <6%" parsed from field "Status0" + units: '' + comment: 'Derived from field "Status0". + +Indicates that the processor Idles less than 6 +percent' + procidle12: + long_name: Field "Processor Idles <12%" parsed from field "Status0" + units: '' + comment: 'Derived from field "Status0". + +Indicates that the processor Idles less than 12 +percent' + wakeup_state: + long_name: Field "Wakeup State" parsed from field "Status" + units: '' + comment: 'Derived from field "Status". + +Bit 31-28: Wakeup State. +1111 0000 0000 0000 0000 0000 0000 0000. +00 = bad power +01 = power applied +10 = break +11 = RTC alarm' + orientation: + long_name: Field "Orientation" parsed from field "Status" + units: '' + comment: 'Derived from field "Status" + +Bit 27-25: Orientation. +0000 1110 0000 0000 0000 0000 0000 0000. +0: "XUP" Instrument x-axis defined up, heading +reference axis is Z positive +1: "XDOWN" Instrument x-axis defined down, +heading reference axis is Z positive +2: "YUP" Instrument y-axis defined up, heading +reference axis is Z positive +3: "YDOWN" Instrument y-axis defined down, +heading reference axis is Z positive +4: "ZUP" Instrument z-axis defined up, heading +reference axis is X positive +5: "ZDOWN" Instrument z-axis defined down, +heading reference axis is X positive +7: "AHRS" AHRS reports orientation any way it +points. Example: Z down -> Roll = 180 deg.' 
+ autoorientation: + long_name: Field "Auto Orientation" parsed from field "Status" + units: '' + comment: 'Derived from field "Status". + +Bit 24-22: autoOrientation. +0000 0001 1100 0000 0000 0000 0000 0000. +0: "Fixed" Fixed orientation +1: "Auto" Auto Up Down +3: "AHRS3D" AHRS3D' + previous_wakeup_state: + long_name: Field "Previous Wakeup State" parsed from field "Status" + units: '' + comment: 'Derived from field "Status". + +Bit 21-18: Previous wakeup state. +0000 0000 0011 1100 0000 0000 0000 0000. +00 = bad power +01 = power applied +10 = break +11 = RTC alarm' + last_measurement_low_voltage_skip: + long_name: Field "Previous Measurement Skipped Due To Low Voltage" parsed from field "Status" + units: '' + comment: 'Derived from field "Status". + +Bit 17: Last measurement low voltage skip. +0: normal operation +1: last measurement skipped due to low input +voltage' + active_configuration: + long_name: Field "Active Configuration" parsed from field "Status" + units: '' + comment: 'Derived from field "Status". + +Bit 16: Active configuration. +0: Settings for PLAN,BURST,AVG +1: Settings for PLAN1,BURST1,AVG1' + echosounder_index: + long_name: Field "Echosounder Index" parsed from field "Status" + units: '' + comment: 'Derived from field "Status". + +Bit 15-12: Echosounder frequency index. Valid +numbers are 0, 1 and 2 (or 0000, 0001 and 0010) +referring to frequencies 1, 2 or 3 as used in +SET-/GETECHO. +0000 0000 0000 0000 1111 0000 0000 0000' + telemetry_data: + long_name: Field "Telemetry Data" parsed from field "Status" + units: '' + comment: 'Derived from field "Status" + +Telemetry data' + boost_running: + long_name: Field "Boost Running" parsed from field "Status" + units: '' + comment: 'Derived from field "Status" + +Boost running' + echosounder_frequency_bin: + long_name: Field "Echosounder Frequency Bin" parsed from field "Status" + units: '' + comment: 'Derived from field "Status". + +Bit 9-5: Echosounder frequency bin. 
+Used only on the Signature100 which supports up to +5 packages pr frequency index. Processing is then +done on the different frequency bands. +0000 0000 0000 0000 0000 0011 1110 0000' + bd_scaling: + long_name: Field "Blanking Distance Scaling In Cm" parsed from field "Status" + units: '' + comment: 'Derived from field "Status". + +Bit 1: Scaling of blanking distance. +0: mm scaling +1: given in cm' diff --git a/echopype/convert/api.py b/echopype/convert/api.py index f3530cbe9..c54ed7790 100644 --- a/echopype/convert/api.py +++ b/echopype/convert/api.py @@ -117,19 +117,24 @@ def _save_groups_to_file(echodata, output_path, engine, compress=True): ) # Environment group - io.save_file( - echodata.environment.chunk( - # Making chunking w.r.t. ping_time for AD2CP - # and w.r.t. time1 for the rest of the sensors - {"time1": DEFAULT_CHUNK_SIZE["ping_time"]} - if echodata.top.attrs["keywords"] != "AD2CP" - else {"ping_time": DEFAULT_CHUNK_SIZE["ping_time"]} - ), # TODO: chunking necessary? - path=output_path, - mode="a", - engine=engine, - group="Environment", - ) + if "time1" in echodata.environment: + io.save_file( + echodata.environment.chunk( + {"time1": DEFAULT_CHUNK_SIZE["ping_time"]} + ), # TODO: chunking necessary? 
+ path=output_path, + mode="a", + engine=engine, + group="Environment", + ) + else: + io.save_file( + echodata.environment, + path=output_path, + mode="a", + engine=engine, + group="Environment", + ) # Sonar group io.save_file( @@ -142,18 +147,19 @@ def _save_groups_to_file(echodata, output_path, engine, compress=True): # /Sonar/Beam_groupX group if echodata.sonar_model == "AD2CP": - io.save_file( - echodata.beam.chunk( - { - "ping_time": DEFAULT_CHUNK_SIZE["ping_time"], - } - ), - path=output_path, - mode="a", - engine=engine, - group=f"Sonar/{BEAM_SUBGROUP_DEFAULT}", - compression_settings=COMPRESSION_SETTINGS[engine] if compress else None, - ) + for i in range(1, len(echodata["Sonar"]["beam_group"]) + 1): + io.save_file( + echodata[f"Sonar/Beam_group{i}"].chunk( + { + "ping_time": DEFAULT_CHUNK_SIZE["ping_time"], + } + ), + path=output_path, + mode="a", + engine=engine, + group=f"Sonar/Beam_group{i}", + compression_settings=COMPRESSION_SETTINGS[engine] if compress else None, + ) else: io.save_file( echodata.beam.chunk( @@ -168,20 +174,20 @@ def _save_groups_to_file(echodata, output_path, engine, compress=True): group=f"Sonar/{BEAM_SUBGROUP_DEFAULT}", compression_settings=COMPRESSION_SETTINGS[engine] if compress else None, ) - if echodata.beam_power is not None: - io.save_file( - echodata.beam_power.chunk( - { - "range_sample": DEFAULT_CHUNK_SIZE["range_sample"], - "ping_time": DEFAULT_CHUNK_SIZE["ping_time"], - } - ), - path=output_path, - mode="a", - engine=engine, - group="Sonar/Beam_group2", - compression_settings=COMPRESSION_SETTINGS[engine] if compress else None, - ) + if echodata.beam_power is not None: + io.save_file( + echodata.beam_power.chunk( + { + "range_sample": DEFAULT_CHUNK_SIZE["range_sample"], + "ping_time": DEFAULT_CHUNK_SIZE["ping_time"], + } + ), + path=output_path, + mode="a", + engine=engine, + group="Sonar/Beam_group2", + compression_settings=COMPRESSION_SETTINGS[engine] if compress else None, + ) # Platform group io.save_file( diff --git 
a/echopype/convert/parse_ad2cp.py b/echopype/convert/parse_ad2cp.py index 91a46384c..deffd1f87 100644 --- a/echopype/convert/parse_ad2cp.py +++ b/echopype/convert/parse_ad2cp.py @@ -100,22 +100,45 @@ class Dimension(Enum): Determines the dimensions of the data in the output dataset """ - PING_TIME = "ping_time" - PING_TIME_AVERAGE = "ping_time_average" - PING_TIME_BURST = "ping_time_burst" - PING_TIME_ECHOSOUNDER = "ping_time_echosounder" - PING_TIME_ECHOSOUNDER_RAW = "ping_time_echosounder_raw" - PING_TIME_ECHOSOUNDER_RAW_TRANSMIT = "ping_time_echosounder_raw_transmit" - BEAM = "beam" - RANGE_SAMPLE_BURST = "range_sample_burst" - RANGE_SAMPLE_AVERAGE = "range_sample_average" - RANGE_SAMPLE_ECHOSOUNDER = "range_sample_echosounder" - NUM_ALTIMETER_SAMPLES = "num_altimeter_samples" - SAMPLE = "sample" - SAMPLE_TRANSMIT = "sample_transmit" - MIJ = "mij" - XYZ = "xyz" - WXYZ = "wxyz" + PING_TIME = auto() + PING_TIME_AVERAGE = auto() + PING_TIME_BURST = auto() + PING_TIME_ECHOSOUNDER = auto() + PING_TIME_ECHOSOUNDER_RAW = auto() + PING_TIME_ECHOSOUNDER_RAW_TRANSMIT = auto() + BEAM = auto() + RANGE_SAMPLE_BURST = auto() + RANGE_SAMPLE_AVERAGE = auto() + RANGE_SAMPLE_ECHOSOUNDER = auto() + NUM_ALTIMETER_SAMPLES = auto() + SAMPLE = auto() + SAMPLE_TRANSMIT = auto() + MIJ = auto() + XYZ = auto() + WXYZ = auto() + + def dimension_name(self) -> str: + return DIMENSION_NAMES[self] + + +DIMENSION_NAMES = { + Dimension.PING_TIME: "time1", + Dimension.PING_TIME_AVERAGE: "ping_time", + Dimension.PING_TIME_BURST: "ping_time", + Dimension.PING_TIME_ECHOSOUNDER: "ping_time", + Dimension.PING_TIME_ECHOSOUNDER_RAW: "ping_time", + Dimension.PING_TIME_ECHOSOUNDER_RAW_TRANSMIT: "ping_time_transmit", + Dimension.BEAM: "beam", + Dimension.RANGE_SAMPLE_BURST: "range_sample", + Dimension.RANGE_SAMPLE_AVERAGE: "range_sample", + Dimension.RANGE_SAMPLE_ECHOSOUNDER: "range_sample", + Dimension.NUM_ALTIMETER_SAMPLES: "num_altimeter_samples", + Dimension.SAMPLE: "range_sample", + 
Dimension.SAMPLE_TRANSMIT: "transmit_sample", + Dimension.MIJ: "mij", + Dimension.XYZ: "xyz", + Dimension.WXYZ: "wxyz", +} class Field: @@ -135,7 +158,6 @@ def __init__( field_dimensions: Union[List[Dimension], Callable[[DataRecordType], List[Dimension]]] = [ Dimension.PING_TIME ], - field_units: Optional[str] = None, field_unit_conversion: Callable[ ["Ad2cpDataPacket", np.ndarray], np.ndarray ] = lambda _, x: x, @@ -154,7 +176,6 @@ def __init__( n number of m length arrays, etc. field_dimensions: Dimensions of the field in the output dataset - field_units: Label for the units of the field, if any field_unit_conversion: Unit conversion function on field field_exists_predicate: Tests to see whether the field should be parsed at all """ @@ -164,7 +185,6 @@ def __init__( self.field_entry_data_type = field_entry_data_type self.field_shape = field_shape self.field_dimensions = field_dimensions - self.field_units = field_units self.field_unit_conversion = field_unit_conversion self.field_exists_predicate = field_exists_predicate @@ -186,13 +206,6 @@ def default_dimensions() -> List[Dimension]: return [Dimension.PING_TIME] - def units(self): - """ - Returns the field's units - """ - - return self.field_units - F = Field # use F instead of Field to make the repeated fields easier to read @@ -312,9 +325,12 @@ def timestamp(self) -> np.datetime64: minute = self.data["minute"] seconds = self.data["seconds"] microsec100 = self.data["microsec100"] - return np.datetime64( - f"{year:04}-{month:02}-{day:02}T{hour:02}:{minute:02}:{seconds:02}.{microsec100:04}" - ) + try: + return np.datetime64( + f"{year:04}-{month:02}-{day:02}T{hour:02}:{minute:02}:{seconds:02}.{microsec100:04}" + ) # type: ignore + except ValueError: + return np.datetime64("NaT") # type: ignore def is_burst(self) -> bool: """ @@ -618,11 +634,6 @@ def _postprocess(self, field_name): self.data["dataset_description"], [(2, 0), (5, 3), (8, 6), (11, 9), (14, 12)], ) - if ( - 
self.parser.packets[-1].is_echosounder_raw() - or self.parser.packets[-1].is_echosounder_raw_transmit() - ): - self.parser.packets[-1].data["echosounder_raw_beam"] = self.data["beams"][0] elif ( self.data_record_format == HeaderOrDataRecordFormats.BURST_AVERAGE_VERSION3_DATA_RECORD_FORMAT @@ -726,13 +737,6 @@ def _postprocess(self, field_name): ("bd_scaling", 1, 1), ], ) - if ( - self.parser.packets[-1].is_echosounder_raw() - or self.parser.packets[-1].is_echosounder_raw_transmit() - ): - self.parser.packets[-1].data["echosounder_raw_echogram"] = self.data[ - "echosounder_index" - ] elif self.data_record_format == HeaderOrDataRecordFormats.BOTTOM_TRACK_DATA_RECORD_FORMAT: if field_name == "configuration": self._postprocess_bitfield( @@ -783,6 +787,23 @@ def _postprocess(self, field_name): self.data["echosounder_raw_transmit_samples_q"] = self.data[ "echosounder_raw_transmit_samples" ][:, 1] + elif field_name == "status": + self._postprocess_bitfield( + self.data["status"], + [ + ("wakeup_state", 31, 28), + ("orientation", 27, 25), + ("autoorientation", 24, 22), + ("previous_wakeup_state", 21, 18), + ("last_measurement_low_voltage_skip", 17, 17), + ("active_configuration", 16, 16), + ("echosounder_index", 15, 12), + ("telemetry_data", 11, 11), + ("boost_running", 10, 10), + ("echosounder_frequency_bin", 9, 5), + ("bd_scaling", 1, 1), + ], + ) @staticmethod def checksum(data: bytes) -> int: @@ -814,7 +835,8 @@ class HeaderOrDataRecordFormat: A collection of fields which represents the header format or a data record format """ - def __init__(self, fields: List[Field]): + def __init__(self, name: str, fields: List[Field]): + self.name = name self.fields = OrderedDict([(f.field_name, f) for f in fields]) def get_field(self, field_name: str) -> Optional[Field]: @@ -846,6 +868,7 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec return cls.DATA_RECORD_FORMATS[data_record_type] HEADER_FORMAT: HeaderOrDataRecordFormat = 
HeaderOrDataRecordFormat( + "HEADER_FORMAT", [ F("sync", 1, UNSIGNED_INTEGER), F("header_size", 1, UNSIGNED_INTEGER), @@ -861,9 +884,10 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec # in (0x23, 0x24) else UNSIGNED_INTEGER), F("data_record_checksum", 2, UNSIGNED_INTEGER), F("header_checksum", 2, UNSIGNED_INTEGER), - ] + ], ) STRING_DATA_RECORD_FORMAT: HeaderOrDataRecordFormat = HeaderOrDataRecordFormat( + "STRING_DATA_RECORD_FORMAT", [ F("string_data_id", 1, UNSIGNED_INTEGER), F( @@ -871,12 +895,13 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec lambda packet: packet.data["data_record_size"] - 1, STRING, ), - ] + ], ) BURST_AVERAGE_VERSION2_DATA_RECORD_FORMAT: HeaderOrDataRecordFormat = HeaderOrDataRecordFormat( + "BURST_AVERAGE_VERSION2_DATA_RECORD_FORMAT", [ F("version", 1, UNSIGNED_INTEGER), - F("offset_of_data", 1, UNSIGNED_INTEGER, field_units="# of bytes"), + F("offset_of_data", 1, UNSIGNED_INTEGER), F("serial_number", 4, UNSIGNED_INTEGER), F("configuration", 2, UNSIGNED_INTEGER), F("year", 1, UNSIGNED_INTEGER), @@ -890,42 +915,36 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec "speed_of_sound", 2, UNSIGNED_INTEGER, - field_units="m/s", field_unit_conversion=lambda packet, x: x / 10, ), F( "temperature", 2, SIGNED_INTEGER, - field_units="degrees Celsius", field_unit_conversion=lambda packet, x: x / 100, ), F( "pressure", 4, UNSIGNED_INTEGER, - field_units="dBar", field_unit_conversion=lambda packet, x: x / 1000, ), F( "heading", 2, UNSIGNED_INTEGER, - field_units="degrees", field_unit_conversion=lambda packet, x: x / 100, ), F( "pitch", 2, SIGNED_INTEGER, - field_units="degrees", field_unit_conversion=lambda packet, x: x / 100, ), F( "roll", 2, SIGNED_INTEGER, - field_units="degrees", field_unit_conversion=lambda packet, x: x / 100, ), F("error", 2, UNSIGNED_INTEGER), @@ -935,28 +954,24 @@ def data_record_format(cls, data_record_type: DataRecordType) 
-> HeaderOrDataRec "cell_size", 2, UNSIGNED_INTEGER, - field_units="m", field_unit_conversion=lambda packet, x: x / 1000, ), F( "blanking", 2, UNSIGNED_INTEGER, - field_units="m", field_unit_conversion=lambda packet, x: x / 1000, ), F( "velocity_range", 2, UNSIGNED_INTEGER, - field_units="m/s", field_unit_conversion=lambda packet, x: x / 1000, ), F( "battery_voltage", 2, UNSIGNED_INTEGER, - field_units="V", field_unit_conversion=lambda packet, x: x / 10, ), F( @@ -988,13 +1003,12 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec "ambiguity_velocity", 2, UNSIGNED_INTEGER, - field_units="m/s", field_unit_conversion=lambda packet, x: x / 10000, ), F("dataset_description", 2, UNSIGNED_INTEGER), F("transmit_energy", 2, UNSIGNED_INTEGER), F("velocity_scaling", 1, SIGNED_INTEGER), - F("power_level", 1, SIGNED_INTEGER, field_units="dB"), + F("power_level", 1, SIGNED_INTEGER), F(None, 4, UNSIGNED_INTEGER), F( # used when burst "velocity_data_burst", @@ -1009,7 +1023,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec Dimension.BEAM, Dimension.RANGE_SAMPLE_BURST, ], - field_units="m/s", field_unit_conversion=lambda packet, x: x * (10.0 ** packet.data["velocity_scaling"]), field_exists_predicate=lambda packet: packet.is_burst() @@ -1028,7 +1041,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec Dimension.BEAM, Dimension.RANGE_SAMPLE_AVERAGE, ], - field_units="m/s", field_unit_conversion=lambda packet, x: x * (10.0 ** packet.data["velocity_scaling"]), field_exists_predicate=lambda packet: packet.is_average() @@ -1047,7 +1059,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec Dimension.BEAM, Dimension.RANGE_SAMPLE_ECHOSOUNDER, ], - field_units="m/s", field_unit_conversion=lambda packet, x: x * (10.0 ** packet.data["velocity_scaling"]), field_exists_predicate=lambda packet: packet.is_echosounder() @@ -1066,7 +1077,6 @@ def 
data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec Dimension.BEAM, Dimension.RANGE_SAMPLE_BURST, ], - field_units="dB/count", field_unit_conversion=lambda packet, x: x / 2, field_exists_predicate=lambda packet: packet.is_burst() and packet.data["amplitude_data_included"], @@ -1084,7 +1094,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec Dimension.BEAM, Dimension.RANGE_SAMPLE_AVERAGE, ], - field_units="dB/count", field_unit_conversion=lambda packet, x: x / 2, field_exists_predicate=lambda packet: packet.is_average() and packet.data["amplitude_data_included"], @@ -1102,7 +1111,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec Dimension.BEAM, Dimension.RANGE_SAMPLE_ECHOSOUNDER, ], - field_units="dB/count", field_unit_conversion=lambda packet, x: x / 2, field_exists_predicate=lambda packet: packet.is_echosounder() and packet.data["amplitude_data_included"], @@ -1120,7 +1128,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec Dimension.BEAM, Dimension.RANGE_SAMPLE_BURST, ], - field_units="0-100", field_exists_predicate=lambda packet: packet.is_burst() and packet.data["correlation_data_included"], ), @@ -1137,7 +1144,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec Dimension.BEAM, Dimension.RANGE_SAMPLE_AVERAGE, ], - field_units="0-100", field_exists_predicate=lambda packet: packet.is_average() and packet.data["correlation_data_included"], ), @@ -1154,16 +1160,16 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec Dimension.BEAM, Dimension.RANGE_SAMPLE_ECHOSOUNDER, ], - field_units="0-100", field_exists_predicate=lambda packet: packet.is_echosounder() and packet.data["correlation_data_included"], ), - ] + ], ) BURST_AVERAGE_VERSION3_DATA_RECORD_FORMAT: HeaderOrDataRecordFormat = HeaderOrDataRecordFormat( + "BURST_AVERAGE_VERSION3_DATA_RECORD_FORMAT", [ F("version", 1, 
UNSIGNED_INTEGER), - F("offset_of_data", 1, UNSIGNED_INTEGER, field_units="# of bytes"), + F("offset_of_data", 1, UNSIGNED_INTEGER), F("configuration", 2, UNSIGNED_INTEGER), F("serial_number", 4, UNSIGNED_INTEGER), F("year", 1, UNSIGNED_INTEGER), @@ -1177,42 +1183,36 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec "speed_of_sound", 2, UNSIGNED_INTEGER, - field_units="m/s", field_unit_conversion=lambda packet, x: x / 10, ), F( "temperature", 2, SIGNED_INTEGER, - field_units="degrees Celsius", field_unit_conversion=lambda packet, x: x / 100, ), F( "pressure", 4, UNSIGNED_INTEGER, - field_units="dBar", field_unit_conversion=lambda packet, x: x / 1000, ), F( "heading", 2, UNSIGNED_INTEGER, - field_units="degrees", field_unit_conversion=lambda packet, x: x / 100, ), F( "pitch", 2, SIGNED_INTEGER, - field_units="degrees", field_unit_conversion=lambda packet, x: x / 100, ), F( "roll", 2, SIGNED_INTEGER, - field_units="degrees", field_unit_conversion=lambda packet, x: x / 100, ), F("num_beams_and_coordinate_system_and_num_cells", 2, UNSIGNED_INTEGER), @@ -1220,7 +1220,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec "cell_size", 2, UNSIGNED_INTEGER, - field_units="m", field_unit_conversion=lambda packet, x: x / 1000, ), # This field is listed to be in cm, but testing has shown that it is actually in mm. 
@@ -1229,22 +1228,19 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec "blanking", 2, UNSIGNED_INTEGER, - field_units="m", field_unit_conversion=lambda packet, x: x / 1000, ), - F("nominal_correlation", 1, UNSIGNED_INTEGER, field_units="%"), + F("nominal_correlation", 1, UNSIGNED_INTEGER), F( "temperature_from_pressure_sensor", 1, UNSIGNED_INTEGER, - field_units="degrees Celsius", field_unit_conversion=lambda packet, x: x * 5, ), F( "battery_voltage", 2, UNSIGNED_INTEGER, - field_units="V", field_unit_conversion=lambda packet, x: x / 10, ), F( @@ -1279,13 +1275,17 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec F("dataset_description", 2, UNSIGNED_INTEGER), F("transmit_energy", 2, UNSIGNED_INTEGER), F("velocity_scaling", 1, SIGNED_INTEGER), - F("power_level", 1, SIGNED_INTEGER, field_units="dB"), - F("magnetometer_temperature", 2, SIGNED_INTEGER), + F("power_level", 1, SIGNED_INTEGER), + F( + "magnetometer_temperature", + 2, + SIGNED_INTEGER, + field_unit_conversion=lambda packet, x: x * 1000, + ), F( "real_time_clock_temperature", 2, SIGNED_INTEGER, - field_units="degrees Celsius", field_unit_conversion=lambda packet, x: x / 100, ), F("error", 2, UNSIGNED_INTEGER), @@ -1305,7 +1305,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec Dimension.BEAM, Dimension.RANGE_SAMPLE_BURST, ], - field_units="m/s", field_unit_conversion=lambda packet, x: x * (10.0 ** packet.data["velocity_scaling"]), field_exists_predicate=lambda packet: packet.is_burst() @@ -1324,7 +1323,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec Dimension.BEAM, Dimension.RANGE_SAMPLE_AVERAGE, ], - field_units="m/s", field_unit_conversion=lambda packet, x: x * (10.0 ** packet.data["velocity_scaling"]), field_exists_predicate=lambda packet: packet.is_average() @@ -1343,7 +1341,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec 
Dimension.BEAM, Dimension.RANGE_SAMPLE_ECHOSOUNDER, ], - field_units="m/s", field_unit_conversion=lambda packet, x: x * (10.0 ** packet.data["velocity_scaling"]), field_exists_predicate=lambda packet: packet.is_echosounder() @@ -1362,7 +1359,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec Dimension.BEAM, Dimension.RANGE_SAMPLE_BURST, ], - field_units="dB/count", field_unit_conversion=lambda packet, x: x / 2, field_exists_predicate=lambda packet: packet.is_burst() and packet.data["amplitude_data_included"], @@ -1380,7 +1376,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec Dimension.BEAM, Dimension.RANGE_SAMPLE_AVERAGE, ], - field_units="dB/count", field_unit_conversion=lambda packet, x: x / 2, field_exists_predicate=lambda packet: packet.is_average() and packet.data["amplitude_data_included"], @@ -1398,7 +1393,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec Dimension.BEAM, Dimension.RANGE_SAMPLE_ECHOSOUNDER, ], - field_units="dB/count", field_unit_conversion=lambda packet, x: x / 2, field_exists_predicate=lambda packet: packet.is_echosounder() and packet.data["amplitude_data_included"], @@ -1416,7 +1410,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec Dimension.BEAM, Dimension.RANGE_SAMPLE_BURST, ], - field_units="0-100", field_exists_predicate=lambda packet: packet.is_burst() and packet.data["correlation_data_included"], ), @@ -1433,7 +1426,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec Dimension.BEAM, Dimension.RANGE_SAMPLE_AVERAGE, ], - field_units="0-100", field_exists_predicate=lambda packet: packet.is_average() and packet.data["correlation_data_included"], ), @@ -1450,7 +1442,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec Dimension.BEAM, Dimension.RANGE_SAMPLE_ECHOSOUNDER, ], - field_units="0-100", field_exists_predicate=lambda packet: 
packet.is_echosounder() and packet.data["correlation_data_included"], ), @@ -1458,7 +1449,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec "altimeter_distance", 4, FLOAT, - field_units="m", field_exists_predicate=lambda packet: packet.data["altimeter_data_included"], ), F( @@ -1471,27 +1461,25 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec "ast_distance", 4, FLOAT, - field_units="m", field_exists_predicate=lambda packet: packet.data["ast_data_included"], ), F( "ast_quality", 2, UNSIGNED_INTEGER, + field_unit_conversion=lambda packet, x: x / 100, field_exists_predicate=lambda packet: packet.data["ast_data_included"], ), F( "ast_offset_100us", 2, SIGNED_INTEGER, - field_units="100 μs", field_exists_predicate=lambda packet: packet.data["ast_data_included"], ), F( "ast_pressure", 4, FLOAT, - field_units="dBar", field_exists_predicate=lambda packet: packet.data["ast_data_included"], ), F( @@ -1515,7 +1503,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec "altimeter_raw_data_sample_distance", 2, UNSIGNED_INTEGER, - field_units="m", field_unit_conversion=lambda packet, x: x / 10000, field_exists_predicate=lambda packet: packet.data["altimeter_raw_data_included"], ), @@ -1538,7 +1525,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec Dimension.PING_TIME_ECHOSOUNDER, Dimension.RANGE_SAMPLE_ECHOSOUNDER, ], - field_units="dB/count", field_unit_conversion=lambda packet, x: x / 100, field_exists_predicate=lambda packet: packet.data["echosounder_data_included"], ), @@ -1575,7 +1561,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec Dimension.PING_TIME, RANGE_SAMPLES[data_record_type], ], - field_units="%", field_exists_predicate=lambda packet: packet.data["percentage_good_data_included"], ), # Only the pitch field is labeled as included when the "std dev data included" @@ -1584,7 +1569,6 @@ def 
data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec "std_dev_pitch", 2, SIGNED_INTEGER, - field_units="degrees", field_unit_conversion=lambda packet, x: x / 100, field_exists_predicate=lambda packet: packet.data["std_dev_data_included"], ), @@ -1592,7 +1576,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec "std_dev_roll", 2, SIGNED_INTEGER, - field_units="degrees", field_unit_conversion=lambda packet, x: x / 100, field_exists_predicate=lambda packet: packet.data["std_dev_data_included"], ), @@ -1600,7 +1583,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec "std_dev_heading", 2, SIGNED_INTEGER, - field_units="degrees", field_unit_conversion=lambda packet, x: x / 100, field_exists_predicate=lambda packet: packet.data["std_dev_data_included"], ), @@ -1608,7 +1590,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec "std_dev_pressure", 2, SIGNED_INTEGER, - field_units="dBar", field_unit_conversion=lambda packet, x: x / 100, field_exists_predicate=lambda packet: packet.data["std_dev_data_included"], ), @@ -1618,12 +1599,13 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec RAW_BYTES, field_exists_predicate=lambda packet: packet.data["std_dev_data_included"], ), - ] + ], ) BOTTOM_TRACK_DATA_RECORD_FORMAT: HeaderOrDataRecordFormat = HeaderOrDataRecordFormat( + "BOTTOM_TRACK_DATA_RECORD_FORMAT", [ F("version", 1, UNSIGNED_INTEGER), - F("offset_of_data", 1, UNSIGNED_INTEGER, field_units="# of bytes"), + F("offset_of_data", 1, UNSIGNED_INTEGER), F("configuration", 2, UNSIGNED_INTEGER), F("serial_number", 4, UNSIGNED_INTEGER), F("year", 1, UNSIGNED_INTEGER), @@ -1637,42 +1619,36 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec "speed_of_sound", 2, UNSIGNED_INTEGER, - field_units="m/s", field_unit_conversion=lambda packet, x: x / 10, ), F( "temperature", 2, SIGNED_INTEGER, - 
field_units="degrees Celsius", field_unit_conversion=lambda packet, x: x / 100, ), F( "pressure", 4, UNSIGNED_INTEGER, - field_units="dBar", field_unit_conversion=lambda packet, x: x / 1000, ), F( "heading", 2, UNSIGNED_INTEGER, - field_units="degrees", field_unit_conversion=lambda packet, x: x / 100, ), F( "pitch", 2, SIGNED_INTEGER, - field_units="degrees", field_unit_conversion=lambda packet, x: x / 100, ), F( "roll", 2, SIGNED_INTEGER, - field_units="degrees", field_unit_conversion=lambda packet, x: x / 100, ), F("num_beams_and_coordinate_system_and_num_cells", 2, UNSIGNED_INTEGER), @@ -1680,23 +1656,20 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec "cell_size", 2, UNSIGNED_INTEGER, - field_units="m", field_unit_conversion=lambda packet, x: x / 1000, ), F( "blanking", 2, UNSIGNED_INTEGER, - field_units="m", field_unit_conversion=lambda packet, x: x / 1000, ), - F("nominal_correlation", 1, UNSIGNED_INTEGER, field_units="%"), + F("nominal_correlation", 1, UNSIGNED_INTEGER), F(None, 1, RAW_BYTES), F( "battery_voltage", 2, UNSIGNED_INTEGER, - field_units="V", field_unit_conversion=lambda packet, x: x / 10, ), F( @@ -1727,17 +1700,21 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec # Unit conversions for this field are done in Ad2cpDataPacket._postprocess # because the ambiguity velocity unit conversion requires the velocity_scaling field, # which is not known when this field is parsed - F("ambiguity_velocity", 4, UNSIGNED_INTEGER, field_units="m/s"), + F("ambiguity_velocity", 4, UNSIGNED_INTEGER), F("dataset_description", 2, UNSIGNED_INTEGER), F("transmit_energy", 2, UNSIGNED_INTEGER), F("velocity_scaling", 1, SIGNED_INTEGER), - F("power_level", 1, SIGNED_INTEGER, field_units="dB"), - F("magnetometer_temperature", 2, SIGNED_INTEGER), + F("power_level", 1, SIGNED_INTEGER), + F( + "magnetometer_temperature", + 2, + SIGNED_INTEGER, + field_unit_conversion=lambda packet, x: x * 1000, + ), F( 
"real_time_clock_temperature", 2, SIGNED_INTEGER, - field_units="degrees Celsius", field_unit_conversion=lambda packet, x: x / 100, ), F("error", 4, UNSIGNED_INTEGER), @@ -1749,7 +1726,6 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec SIGNED_INTEGER, field_shape=lambda packet: [packet.data.get("num_beams", 0)], field_dimensions=[Dimension.PING_TIME, Dimension.BEAM], - field_units="m/s", field_unit_conversion=lambda packet, x: x * (10.0 ** packet.data["velocity_scaling"]), field_exists_predicate=lambda packet: packet.data["velocity_data_included"], @@ -1771,12 +1747,13 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec field_dimensions=[Dimension.PING_TIME, Dimension.BEAM], field_exists_predicate=lambda packet: packet.data["figure_of_merit_data_included"], ), - ] + ], ) ECHOSOUNDER_RAW_DATA_RECORD_FORMAT: HeaderOrDataRecordFormat = HeaderOrDataRecordFormat( + "ECHOSOUNDER_RAW_DATA_RECORD_FORMAT", [ F("version", 1, UNSIGNED_INTEGER), - F("offset_of_data", 1, UNSIGNED_INTEGER, field_units="# of bytes"), + F("offset_of_data", 1, UNSIGNED_INTEGER), F("year", 1, UNSIGNED_INTEGER), F("month", 1, UNSIGNED_INTEGER), F("day", 1, UNSIGNED_INTEGER), @@ -1865,7 +1842,7 @@ def data_record_format(cls, data_record_type: DataRecordType) -> HeaderOrDataRec ], field_exists_predicate=lambda packet: False, ), - ] + ], ) DATA_RECORD_FORMATS = { diff --git a/echopype/convert/set_groups_ad2cp.py b/echopype/convert/set_groups_ad2cp.py index 525251e6e..4fc692e46 100644 --- a/echopype/convert/set_groups_ad2cp.py +++ b/echopype/convert/set_groups_ad2cp.py @@ -1,8 +1,12 @@ +from enum import Enum, auto, unique +from importlib import resources from typing import Dict, List, Optional, Set, Tuple, Union import numpy as np import xarray as xr +import yaml +from .. 
import convert from ..utils.coding import set_encodings from .parse_ad2cp import DataType, Dimension, Field, HeaderOrDataRecordFormats from .set_groups_base import SetGroupsBase @@ -14,23 +18,25 @@ } +@unique +class BeamGroup(Enum): + AVERAGE = auto() + BURST = auto() + ECHOSOUNDER = auto() + ECHOSOUNDER_RAW = auto() + + class SetGroupsAd2cp(SetGroupsBase): """Class for saving groups to netcdf or zarr from Ad2cp data files.""" - beamgroups_possible = [ - { - "name": "Beam_group1", - "descr": ( - "contains velocity, correlation, and backscatter power (uncalibrated)" - " data and other data derived from acoustic data." - ), - } - ] - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) + # TODO: bug: 0 if not exist in first string packet + # resulting in index error in setting ds["pulse_compressed"] self.pulse_compressed = self.parser_obj.get_pulse_compressed() self._make_time_coords() + with resources.open_text(convert, "ad2cp_fields.yaml") as f: + self.field_attrs: Dict[str, Dict[str, Dict[str, str]]] = yaml.safe_load(f) # type: ignore # noqa def _make_time_coords(self): timestamps = [] @@ -79,8 +85,8 @@ def _make_dataset(self, var_names: Dict[str, str]) -> xr.Dataset: dims: Dict[str, List[Dimension]] = dict() # {field_name: field dtype} dtypes: Dict[str, np.dtype] = dict() - # {field_name: units} - units: Dict[str, Optional[str]] = dict() + # {field_name: attrs} + attrs: Dict[str, Dict[str, str]] = dict() # {field_name: [idx of padding]} pad_idx: Dict[str, List[int]] = {field_name: [] for field_name in var_names.keys()} # {field_name: field exists} @@ -104,6 +110,10 @@ def _make_dataset(self, var_names: Dict[str, str]) -> xr.Dataset: field_dimensions = Field.default_dimensions() # can't store in dims yet because there might be another data record format # which does have this field + + if field_name not in attrs: + if field_name in self.field_attrs["POSTPROCESSED"]: + attrs[field_name] = self.field_attrs["POSTPROCESSED"][field_name] else: 
field_dimensions = field.dimensions(packet.data_record_type) @@ -116,8 +126,8 @@ def _make_dataset(self, var_names: Dict[str, str]) -> xr.Dataset: dtypes[field_name] = field.field_entry_data_type.dtype( field_entry_size_bytes ) - if field_name not in units: - units[field_name] = field.units() + if field_name not in attrs: + attrs[field_name] = self.field_attrs[data_record_format.name][field_name] if field_name in packet.data: # field is in this packet fields[field_name].append(packet.data[field_name]) @@ -193,11 +203,9 @@ def _make_dataset(self, var_names: Dict[str, str]) -> xr.Dataset: Union[Tuple[List[str], np.ndarray, Dict[str, str]], Tuple[Tuple[()], None]], ] = { var_name: ( - [dim.value for dim in dims[field_name]], + [dim.dimension_name() for dim in dims[field_name]], combined_fields[field_name], - {"Units": units[field_name]} - if field_name in units and units[field_name] is not None - else {}, + attrs.get(field_name, {}), ) if field_exists[field_name] else ((), None) @@ -206,15 +214,15 @@ def _make_dataset(self, var_names: Dict[str, str]) -> xr.Dataset: coords: Dict[str, np.ndarray] = dict() for time_dim, time_idxs in self.times_idx.items(): if time_dim in used_dims: - coords[time_dim.value] = self.timestamps[time_idxs] + coords[time_dim.dimension_name()] = self.timestamps[time_idxs] for ahrs_dim, ahrs_coords in AHRS_COORDS.items(): if ahrs_dim in used_dims: - coords[ahrs_dim.value] = ahrs_coords + coords[ahrs_dim.dimension_name()] = ahrs_coords if Dimension.BEAM in used_dims and beam_coords is not None: - coords[Dimension.BEAM.value] = beam_coords + coords[Dimension.BEAM.dimension_name()] = beam_coords ds = xr.Dataset(data_vars=data_vars, coords=coords) # make arange coords for the remaining dims - non_coord_dims = {dim.value for dim in used_dims} - set(ds.coords.keys()) + non_coord_dims = {dim.dimension_name() for dim in used_dims} - set(ds.coords.keys()) ds = ds.assign_coords({dim: np.arange(ds.dims[dim]) for dim in non_coord_dims}) return ds @@ -227,13 
+235,6 @@ def set_env(self) -> xr.Dataset: } ) - # FIXME: this is a hack because the current file saving - # mechanism requires that the env group have ping_time as a dimension, - # but ping_time might not be a dimension if the dataset is completely - # empty - if "ping_time" not in ds.dims: - ds = ds.expand_dims(dim="ping_time") - return set_encodings(ds) def set_platform(self) -> xr.Dataset: @@ -242,7 +243,6 @@ def set_platform(self) -> xr.Dataset: "heading": "heading", "pitch": "pitch", "roll": "roll", - "magnetometer_raw": "magnetometer_raw", } ) ds = ds.assign_attrs( @@ -254,68 +254,222 @@ def set_platform(self) -> xr.Dataset: ) return set_encodings(ds) - def set_beam(self) -> xr.Dataset: + def set_beam(self) -> List[xr.Dataset]: # TODO: should we divide beam into burst/average (e.g., beam_burst, beam_average) # like was done for range_bin (we have range_bin_burst, range_bin_average, # and range_bin_echosounder)? - ds = self._make_dataset( - { - "num_beams": "number_of_beams", - "coordinate_system": "coordinate_system", - "num_cells": "number_of_cells", - "blanking": "blanking", - "cell_size": "cell_size", - "velocity_range": "velocity_range", - "echosounder_frequency": "echosounder_frequency", - "ambiguity_velocity": "ambiguity_velocity", - "dataset_description": "data_set_description", - "transmit_energy": "transmit_energy", - "velocity_scaling": "velocity_scaling", - "velocity_data_burst": "velocity_burst", - "velocity_data_average": "velocity_average", - "amplitude_data_burst": "amplitude_burst", - "amplitude_data_average": "amplitude_average", - "correlation_data_burst": "correlation_burst", - "correlation_data_average": "correlation_average", - "correlation_data_echosounder": "correlation_echosounder", - "echosounder_data": "amplitude_echosounder", - "figure_of_merit_data": "figure_of_merit", - "altimeter_distance": "altimeter_distance", - "altimeter_quality": "altimeter_quality", - "ast_distance": "ast_distance", - "ast_quality": "ast_quality", - 
"ast_offset_100us": "ast_offset_100us", - "ast_pressure": "ast_pressure", - "altimeter_spare": "altimeter_spare", - "altimeter_raw_data_num_samples": "altimeter_raw_data_num_samples", - "altimeter_raw_data_sample_distance": "altimeter_raw_data_sample_distance", - "altimeter_raw_data_samples": "altimeter_raw_data_samples", - } - ) - ds = ds.assign_attrs({"pulse_compressed": self.pulse_compressed}) + beam_groups = [] + self._beamgroups = [] + beam_groups_exist = set() + + for packet in self.parser_obj.packets: + if packet.is_average(): + beam_groups_exist.add(BeamGroup.AVERAGE) + elif packet.is_burst(): + beam_groups_exist.add(BeamGroup.BURST) + elif packet.is_echosounder(): + beam_groups_exist.add(BeamGroup.ECHOSOUNDER) + elif packet.is_echosounder_raw(): + beam_groups_exist.add(BeamGroup.ECHOSOUNDER_RAW) + + if len(beam_groups_exist) == len(BeamGroup): + break + + # average + if BeamGroup.AVERAGE in beam_groups_exist: + beam_groups.append( + self._make_dataset( + { + "num_beams": "number_of_beams", + "coordinate_system": "coordinate_system", + "num_cells": "number_of_cells", + "blanking": "blanking", + "cell_size": "cell_size", + "velocity_range": "velocity_range", + "echosounder_frequency": "echosounder_frequency", + "ambiguity_velocity": "ambiguity_velocity", + "dataset_description": "data_set_description", + "transmit_energy": "transmit_energy", + "velocity_scaling": "velocity_scaling", + "velocity_data_average": "velocity", + "amplitude_data_average": "amplitude", + "correlation_data_average": "correlation", + } + ) + ) + + self._beamgroups.append( + { + "name": f"Beam_group{len(self._beamgroups) + 1}", + "descr": ( + "contains echo intensity, velocity and correlation data " + "as well as other configuration parameters from the Average mode." 
+ ), + } + ) + # burst + if BeamGroup.BURST in beam_groups_exist: + beam_groups.append( + self._make_dataset( + { + "num_beams": "number_of_beams", + "coordinate_system": "coordinate_system", + "num_cells": "number_of_cells", + "blanking": "blanking", + "cell_size": "cell_size", + "velocity_range": "velocity_range", + "echosounder_frequency": "echosounder_frequency", + "ambiguity_velocity": "ambiguity_velocity", + "dataset_description": "data_set_description", + "transmit_energy": "transmit_energy", + "velocity_scaling": "velocity_scaling", + "velocity_data_burst": "velocity", + "amplitude_data_burst": "amplitude", + "correlation_data_burst": "correlation", + } + ) + ) + + self._beamgroups.append( + { + "name": f"Beam_group{len(self._beamgroups) + 1}", + "descr": ( + "contains echo intensity, velocity and correlation data " + "as well as other configuration parameters from the Burst mode." + ), + } + ) + # echosounder + if BeamGroup.ECHOSOUNDER in beam_groups_exist: + ds = self._make_dataset( + { + "num_beams": "number_of_beams", + "coordinate_system": "coordinate_system", + "num_cells": "number_of_cells", + "blanking": "blanking", + "cell_size": "cell_size", + "velocity_range": "velocity_range", + "echosounder_frequency": "echosounder_frequency", + "ambiguity_velocity": "ambiguity_velocity", + "dataset_description": "data_set_description", + "transmit_energy": "transmit_energy", + "velocity_scaling": "velocity_scaling", + "correlation_data_echosounder": "correlation", + "echosounder_data": "amplitude", + } + ) + ds = ds.assign_coords({"echogram": np.arange(3)}) + pulse_compressed = np.zeros(3) + # TODO: bug: if self.pulse_compress=0 this will set the last index to 1 + pulse_compressed[self.pulse_compressed - 1] = 1 + ds["pulse_compressed"] = (("echogram",), pulse_compressed) + beam_groups.append(ds) + + self._beamgroups.append( + { + "name": f"Beam_group{len(self._beamgroups) + 1}", + "descr": ( + "contains backscatter echo intensity and other configuration " + 
"parameters from the Echosounder mode. " + "Data can be pulse compressed or raw intensity." + ), + } + ) + # echosounder raw + if BeamGroup.ECHOSOUNDER_RAW in beam_groups_exist: + beam_groups.append( + self._make_dataset( + { + "num_beams": "number_of_beams", + "coordinate_system": "coordinate_system", + "num_cells": "number_of_cells", + "blanking": "blanking", + "cell_size": "cell_size", + "velocity_range": "velocity_range", + "echosounder_frequency": "echosounder_frequency", + "ambiguity_velocity": "ambiguity_velocity", + "dataset_description": "data_set_description", + "transmit_energy": "transmit_energy", + "velocity_scaling": "velocity_scaling", + "num_complex_samples": "num_complex_samples", + "ind_start_samples": "ind_start_samples", + "freq_raw_sample_data": "freq_raw_sample_data", + "echosounder_raw_samples_i": "backscatter_r", + "echosounder_raw_samples_q": "backscatter_i", + "echosounder_raw_transmit_samples_i": "transmit_pulse_r", + "echosounder_raw_transmit_samples_q": "transmit_pulse_i", + } + ) + ) + + self._beamgroups.append( + { + "name": f"Beam_group{len(self._beamgroups) + 1}", + "descr": ( + "contains complex backscatter raw samples and other configuration " + "parameters from the Echosounder mode, " + "including complex data from the transmit pulse." 
+ ), + } + ) # FIXME: this is a hack because the current file saving # mechanism requires that the beam group have ping_time as a dimension, # but ping_time might not be a dimension if the dataset is completely # empty - if "ping_time" not in ds.dims: - ds = ds.expand_dims(dim="ping_time") + for i, ds in enumerate(beam_groups): + if "ping_time" not in ds.dims: + beam_groups[i] = ds.expand_dims(dim="ping_time") - return set_encodings(ds) + # remove time1 from beam groups + for i, ds in enumerate(beam_groups): + beam_groups[i] = ds.sel(time1=ds["ping_time"]).drop_vars("time1", errors="ignore") + + return [set_encodings(ds) for ds in beam_groups] def set_vendor(self) -> xr.Dataset: ds = self._make_dataset( { "version": "data_record_version", + "pressure_sensor_valid": "pressure_sensor_valid", + "temperature_sensor_valid": "temperature_sensor_valid", + "compass_sensor_valid": "compass_sensor_valid", + "tilt_sensor_valid": "tilt_sensor_valid", + "velocity_data_included": "velocity_data_included", + "amplitude_data_included": "amplitude_data_included", + "correlation_data_included": "correlation_data_included", + "altimeter_data_included": "altimeter_data_included", + "altimeter_raw_data_included": "altimeter_raw_data_included", + "ast_data_included": "ast_data_included", + "echosounder_data_included": "echosounder_data_included", + "ahrs_data_included": "ahrs_data_included", + "percentage_good_data_included": "percentage_good_data_included", + "std_dev_data_included": "std_dev_data_included", + "distance_data_included": "distance_data_included", + "figure_of_merit_data_included": "figure_of_merit_data_included", "error": "error", - "status": "status", "status0": "status0", + "procidle3": "procidle3", + "procidle6": "procidle6", + "procidle12": "procidle12", + "status": "status", + "wakeup_state": "wakeup_state", + "orientation": "orientation", + "autoorientation": "autoorientation", + "previous_wakeup_state": "previous_wakeup_state", + 
"last_measurement_low_voltage_skip": "last_measurement_low_voltage_skip", + "active_configuration": "active_configuration", + "echosounder_index": "echosounder_index", + "telemetry_data": "telemetry_data", + "boost_running": "boost_running", + "echosounder_frequency_bin": "echosounder_frequency_bin", + "bd_scaling": "bd_scaling", "battery_voltage": "battery_voltage", "power_level": "power_level", "temperature_from_pressure_sensor": "temperature_of_pressure_sensor", "nominal_correlation": "nominal_correlation", "magnetometer_temperature": "magnetometer_temperature", - "real_ping_time_clock_temperature": "real_ping_time_clock_temperature", + "real_time_clock_temperature": "real_time_clock_temperature", "ensemble_counter": "ensemble_counter", "ahrs_rotation_matrix": "ahrs_rotation_matrix_mij", "ahrs_quaternions": "ahrs_quaternions_wxyz", @@ -329,22 +483,20 @@ def set_vendor(self) -> xr.Dataset: "temperature_sensor_valid": "temperature_sensor_valid", "compass_sensor_valid": "compass_sensor_valid", "tilt_sensor_valid": "tilt_sensor_valid", - "echosounder_raw_samples_i": "echosounder_raw_samples_i", - "echosounder_raw_samples_q": "echosounder_raw_samples_q", - "echosounder_raw_transmit_samples_i": "echosounder_raw_transmit_samples_i", - "echosounder_raw_transmit_samples_q": "echosounder_raw_transmit_samples_q", - "echosounder_raw_beam": "echosounder_raw_beam", - "echosounder_raw_echogram": "echosounder_raw_echogram", + "figure_of_merit_data": "figure_of_merit", + "altimeter_distance": "altimeter_distance", + "altimeter_quality": "altimeter_quality", + "ast_distance": "ast_distance", + "ast_quality": "ast_quality", + "ast_offset_100us": "ast_offset_100us", + "ast_pressure": "ast_pressure", + "altimeter_spare": "altimeter_spare", + "altimeter_raw_data_num_samples": "altimeter_raw_data_num_samples", + "altimeter_raw_data_sample_distance": "altimeter_raw_data_sample_distance", + "altimeter_raw_data_samples": "altimeter_raw_data_samples", + "magnetometer_raw": 
"magnetometer_raw", } ) - ds = ds.assign_attrs({"pulse_compressed": self.pulse_compressed}) - - # FIXME: this is a hack because the current file saving - # mechanism requires that the vendor group have ping_time as a dimension, - # but ping_time might not be a dimension if the dataset is completely - # empty - if "ping_time" not in ds.dims: - ds = ds.expand_dims(dim="ping_time") return set_encodings(ds) @@ -353,7 +505,6 @@ def set_sonar(self) -> xr.Dataset: # Add beam_group and beam_group_descr variables sharing a common dimension # (beam_group), using the information from self._beamgroups - self._beamgroups = self.beamgroups_possible beam_groups_vars, beam_groups_coord = self._beam_groups_vars() ds = xr.Dataset(beam_groups_vars, coords=beam_groups_coord) @@ -361,16 +512,20 @@ def set_sonar(self) -> xr.Dataset: sonar_attr_dict = { "sonar_manufacturer": "Nortek", "sonar_model": "AD2CP", - "sonar_serial_number": "", + "sonar_serial_number": ", ".join( + np.unique( + [ + str(packet.data["serial_number"]) + for packet in self.parser_obj.packets + if "serial_number" in packet.data + ] + ) + ), "sonar_software_name": "", "sonar_software_version": "", "sonar_firmware_version": "", "sonar_type": "acoustic Doppler current profiler (ADCP)", } - for packet in self.parser_obj.packets: - if "serial_number" in packet.data: - ds.attrs["sonar_serial_number"] = packet.data["serial_number"] - break firmware_version = self.parser_obj.get_firmware_version() if firmware_version is not None: sonar_attr_dict["sonar_firmware_version"] = ", ".join( @@ -379,4 +534,4 @@ def set_sonar(self) -> xr.Dataset: ds = ds.assign_attrs(sonar_attr_dict) - return ds + return set_encodings(ds) diff --git a/echopype/echodata/convention/1.0.yml b/echopype/echodata/convention/1.0.yml index 51b6ef229..7b8110b27 100644 --- a/echopype/echodata/convention/1.0.yml +++ b/echopype/echodata/convention/1.0.yml @@ -45,6 +45,14 @@ groups: including split-beam angle data when they exist. 
Only exists if complex backscatter data are already in Sonar/Beam_group1 ep_group: Sonar/Beam_group2 + beam_group3: + name: Beam_group3 + description: '' + ep_group: Sonar/Beam_group3 + beam_group4: + name: Beam_group4 + description: '' + ep_group: Sonar/Beam_group4 vendor: name: Vendor_specific description: contains vendor-specific information about the sonar and the data. diff --git a/echopype/tests/convert/test_convert_ad2cp.py b/echopype/tests/convert/test_convert_ad2cp.py index b91ec59be..bf7656f16 100644 --- a/echopype/tests/convert/test_convert_ad2cp.py +++ b/echopype/tests/convert/test_convert_ad2cp.py @@ -170,7 +170,11 @@ def _check_raw_output( if base.attrs[f"Instrument_echo_pulseComp{i}"]: pulse_compressed = i break - assert echodata.vendor.attrs["pulse_compressed"] == pulse_compressed + for i in range(1, len(echodata["Sonar"]["beam_group"]) + 1): + if "pulse_compressed" in echodata[f"Sonar/Beam_group{i}"]: + pulse_compressed_vector = np.zeros(3) + pulse_compressed_vector[pulse_compressed - 1] = 1 + assert (echodata[f"Sonar/Beam_group{i}"]["pulse_compressed"] == pulse_compressed_vector).all() base.close() # check raw data transmit samples @@ -187,35 +191,40 @@ def _check_raw_output( group="Data/RawEcho1_1000kHzTx", ) if "090" in filepath_raw.parts: - assert np.allclose( - echodata.vendor[ - "echosounder_raw_transmit_samples_i" - ].data.flatten(), - base["DataI"].data.flatten(), - atol=absolute_tolerance, - ) - assert np.allclose( - echodata.vendor[ - "echosounder_raw_transmit_samples_q" - ].data.flatten(), - base["DataQ"].data.flatten(), - atol=absolute_tolerance, - ) + for i in range(1, len(echodata["Sonar"]["beam_group"]) + 1): + if "transmit_pulse_r" in echodata[f"Sonar/Beam_group{i}"]: + assert np.allclose( + echodata[f"Sonar/Beam_group{i}"][ + "transmit_pulse_r" + ].data.flatten(), + base["DataI"].data.flatten(), + atol=absolute_tolerance, + ) + assert np.allclose( + echodata[f"Sonar/Beam_group{i}"][ + "transmit_pulse_i" + ].data.flatten(), + 
base["DataQ"].data.flatten(), + atol=absolute_tolerance, + ) else: - assert np.allclose( - echodata.vendor[ - "echosounder_raw_transmit_samples_i" - ].data.flatten(), - base["Data_I"].data.flatten(), - atol=absolute_tolerance, - ) - assert np.allclose( - echodata.vendor[ - "echosounder_raw_transmit_samples_q" - ].data.flatten(), - base["Data_Q"].data.flatten(), - atol=absolute_tolerance, - ) + for i in range(1, len(echodata["Sonar"]["beam_group"]) + 1): + if "transmit_pulse_r" in echodata[f"Sonar/Beam_group{i}"]: + # note the underscore + assert np.allclose( + echodata[f"Sonar/Beam_group{i}"][ + "transmit_pulse_r" + ].data.flatten(), + base["Data_I"].data.flatten(), + atol=absolute_tolerance, + ) + assert np.allclose( + echodata[f"Sonar/Beam_group{i}"][ + "transmit_pulse_i" + ].data.flatten(), + base["Data_Q"].data.flatten(), + atol=absolute_tolerance, + ) base.close() # check raw data samples @@ -224,26 +233,30 @@ def _check_raw_output( group="Data/RawEcho1_1000kHz", ) if "090" in filepath_raw.parts: - assert np.allclose( - echodata.vendor["echosounder_raw_samples_i"].data.flatten(), - base["DataI"].data.flatten(), - atol=absolute_tolerance, - ) - assert np.allclose( - echodata.vendor["echosounder_raw_samples_q"].data.flatten(), - base["DataQ"].data.flatten(), - atol=absolute_tolerance, - ) + for i in range(1, len(echodata["Sonar"]["beam_group"]) + 1): + if "backscatter_r" in echodata[f"Sonar/Beam_group{i}"]: + assert np.allclose( + echodata[f"Sonar/Beam_group{i}"]["backscatter_r"].data.flatten(), + base["DataI"].data.flatten(), + atol=absolute_tolerance, + ) + assert np.allclose( + echodata[f"Sonar/Beam_group{i}"]["backscatter_i"].data.flatten(), + base["DataQ"].data.flatten(), + atol=absolute_tolerance, + ) else: - # note the transpose - assert np.allclose( - echodata.vendor["echosounder_raw_samples_i"].data.flatten(), - base["Data_I"].data.T.flatten(), - atol=absolute_tolerance, - ) - assert np.allclose( - 
echodata.vendor["echosounder_raw_samples_q"].data.flatten(), - base["Data_Q"].data.T.flatten(), - atol=absolute_tolerance, - ) + for i in range(1, len(echodata["Sonar"]["beam_group"]) + 1): + if "transmit_pulse_r" in echodata[f"Sonar/Beam_group{i}"]: + # note the transpose + assert np.allclose( + echodata[f"Sonar/Beam_group{i}"]["backscatter_r"].data.flatten(), + base["Data_I"].data.T.flatten(), + atol=absolute_tolerance, + ) + assert np.allclose( + echodata[f"Sonar/Beam_group{i}"]["backscatter_i"].data.flatten(), + base["Data_Q"].data.T.flatten(), + atol=absolute_tolerance, + ) base.close() diff --git a/echopype/utils/coding.py b/echopype/utils/coding.py index f8583e964..ca1339ed5 100644 --- a/echopype/utils/coding.py +++ b/echopype/utils/coding.py @@ -20,11 +20,7 @@ DEFAULT_ENCODINGS = { "ping_time": DEFAULT_TIME_ENCODING, - "ping_time_burst": DEFAULT_TIME_ENCODING, - "ping_time_average": DEFAULT_TIME_ENCODING, - "ping_time_echosounder": DEFAULT_TIME_ENCODING, - "ping_time_echosounder_raw": DEFAULT_TIME_ENCODING, - "ping_time_echosounder_raw_transmit": DEFAULT_TIME_ENCODING, + "ping_time_transmit": DEFAULT_TIME_ENCODING, "time1": DEFAULT_TIME_ENCODING, "time2": DEFAULT_TIME_ENCODING, "time3": DEFAULT_TIME_ENCODING,