From 88b2f2343f0e89622265c6295f252678815f465f Mon Sep 17 00:00:00 2001 From: Max Burnette Date: Mon, 16 Dec 2019 09:48:47 -0600 Subject: [PATCH] add vnir_middle calibration --- Hyperspectral_Calibration_20191212.txt | 305 ++++++++++++++++++ hyperspectral/calibrate.py | 301 ++++++++++------- .../calibration_new/swir_new/readme.txt | 2 +- .../vnir_middle/best_matched_index.npy | Bin 0 -> 5424 bytes .../vnir_middle/bias_coeff.npy | Bin 0 -> 5424 bytes .../vnir_middle/gain_coeff.npy | Bin 0 -> 5424 bytes .../calibration_new/vnir_middle/readme.txt | 4 + .../calibration_new/vnir_new/readme.txt | 2 +- hyperspectral/extractor_info.json | 2 +- hyperspectral/terra_hyperspectral.py | 36 +++ 10 files changed, 532 insertions(+), 120 deletions(-) create mode 100644 Hyperspectral_Calibration_20191212.txt create mode 100755 hyperspectral/calibration_new/vnir_middle/best_matched_index.npy create mode 100755 hyperspectral/calibration_new/vnir_middle/bias_coeff.npy create mode 100755 hyperspectral/calibration_new/vnir_middle/gain_coeff.npy create mode 100755 hyperspectral/calibration_new/vnir_middle/readme.txt diff --git a/Hyperspectral_Calibration_20191212.txt b/Hyperspectral_Calibration_20191212.txt new file mode 100644 index 0000000..25e8874 --- /dev/null +++ b/Hyperspectral_Calibration_20191212.txt @@ -0,0 +1,305 @@ +# -*- coding: utf-8 -*- + +""" +*** Radiometric calibration of hyperspectral imagery *** + +This framework will process the raw hyperspectral VNIR and SWIR imagery using the pre-computed +calibration models and real-time downwelling irradiance data, convert raw image digital number(DN) +to reflectance and save as netCDF files. + +Input: 1) VNIR and SWIR raw hyperspectral imagery + 2) Spectral data from downwelling irradiance sensor + 3) Pre-computed calibration models (includes four models: vnir_new, vnir_middle, vnir_old, swir_new; models are not avaialbe for swir_old and swir_middle + because no downwelling irridiance data/sensor was avaiable for that time period) + +Output: Hyperspectral reflectance imagery with netCDF format + +@author: Remote Sensing Lab at Saint Louis University + +""" + +import json +import numpy as np +import os +import spectral.io.envi as envi +from netCDF4 import Dataset + +raw_root = "/home/extractor/sites/ua-mac/raw_data" +# raw_root = "/home/extractor/hs_calib" + +# extract spectral profiles from environmentlogger.json +def irradiance_time_extractor(camera_type,envlog_file): + # For the environmental logger records after 04/26/2016, there would be 24 files per day (1 file per hour, 5 seconds per record) + # Convert json fiel to dictionary format file + with open(envlog_file, "r") as fp: + lines = fp.readlines() + slines = "".join(lines) + js = json.loads(slines) + + # assume that time stamp follows in 5 second increments across records since 5 sec/record + num_readings = len(js["environment_sensor_readings"]) + if "spectrometers" in js["environment_sensor_readings"][0]: + if camera_type == "swir_new": + num_bands = len(js["environment_sensor_readings"][0]["spectrometers"]["NIRQuest-512"]["spectrum"]) + else: + num_bands = len(js["environment_sensor_readings"][0]["spectrometers"]["FLAME-T"]["spectrum"]) + else: + num_bands = len(js["environment_sensor_readings"][0]["spectrometer"]["spectrum"]) + + spectra = np.zeros((num_readings, num_bands)) + times = [] + for idx in range(num_readings): + # read time stamp + time_current = js["environment_sensor_readings"][idx]["timestamp"] + C = time_current.replace("."," ").replace("-"," ").replace(":","") + 
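# [Illustrative sketch, not part of the patch] The surrounding lines reduce an
# EnvironmentLogger record timestamp to an integer HHMMSS value so it can later be compared
# with the image timestamp in apply_calibration(). Assuming record timestamps shaped like
# "2017.06.20-11:06:41.371", the same reduction as a self-contained helper (hypothetical name):
def hhmmss_from_envlog_timestamp(ts):
    # "2017.06.20-11:06:41.371" -> ["2017", "06", "20", "110641", "371"]; token 3 is hours+minutes+seconds
    tokens = ts.replace(".", " ").replace("-", " ").replace(":", "").split(" ")
    return int(tokens[3])
# e.g. hhmmss_from_envlog_timestamp("2017.06.20-11:06:41.371") == 110641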
ArrayTime=C.split(" ") + time_current_r = int(ArrayTime[3]) + times.append(time_current_r) + + # read spectrum from irridiance sensors + if "spectrometers" in js["environment_sensor_readings"][idx]: + if camera_type == "swir_new": + spectrum = js["environment_sensor_readings"][0]["spectrometers"]["NIRQuest-512"]["spectrum"] + else: + spectrum = js["environment_sensor_readings"][idx]["spectrometers"]["FLAME-T"]["spectrum"] + else: + spectrum = js["environment_sensor_readings"][idx]["spectrometer"]["spectrum"] + + spectra[idx,:] = spectrum + + return times, spectra + +# replace rfl_img variable in netcdf with given matrix +def update_netcdf(inp, rfl_data, camera_type): + print("Updating %s" % inp) + + out = inp.replace(".nc", "_newrfl.nc") + + with Dataset(inp) as src, Dataset(out, "w") as dst: + # copy global attributes all at once via dictionary + dst.setncatts(src.__dict__) + # copy dimensions + for name, dimension in src.dimensions.items(): + dst.createDimension(name, (len(dimension) if not dimension.isunlimited() else None)) + + # copy all file data except for the excluded + for name, variable in src.variables.items(): + if name == "Google_Map_View": + continue + + # Create variables + var_dict = (src[name].__dict__) + if '_FillValue' in var_dict.keys(): + x = dst.createVariable(name, variable.datatype, variable.dimensions, fill_value=var_dict['_FillValue']) + del var_dict['_FillValue'] + else: + x = dst.createVariable(name, variable.datatype, variable.dimensions) + + # Set variables to values + if name != "rfl_img": + print("...%s" % name) + dst[name][:] = src[name][:] + else: + if camera_type=='vnir_old': + print("...%s (subset)" % name) + dst[name][:679,:,:] = rfl_data + # 679-955 set to NaN + print("...NaNs") + dst[name][679:,:,:] = np.nan + + elif camera_type == "vnir_middle": + print("...%s (subset)" % name) + dst[name][:662,:,:] = rfl_data + # 679-955 set to NaN + print("...NaNs") + dst[name][662:,:,:] = np.nan + else: + print("...%s" % name) + dst[name][:] = rfl_data + + # copy variable attributes all at once via dictionary + dst[name].setncatts(var_dict) + + if 'rfl_img' not in src.variables: + print("...adding rfl_img") + dst.createVariable("rfl_img", "f4") + dst.variables['rfl_img'] = rfl_data + +# apply calibration algorithm to the raw data +def apply_calibration(raw_filepath): + print("Calibrating %s" % raw_filepath) + + # get necessary paths from path to _raw file + raw_dir = os.path.dirname(raw_filepath) + raw_file = os.path.basename(raw_filepath) + md_file = os.path.join(raw_dir, "%s_metadata.json" % raw_file[:-4]) + date = raw_filepath.split("/")[-3] + timestamp = raw_filepath.split("/")[-2] + envlog_dir = os.path.join(raw_root, "EnvironmentLogger/%s" % date) + + # determine type of sensor and age of camera + if raw_filepath.find("VNIR") > -1: + if date < "2018-08-18": + camera_type = "vnir_old" + num_spectral_bands = 955 + num_bands_irradiance = 1024 + image_scanning_time = 540 + elif "2018-08-18" <= date < "2019-02-26": + camera_type = "vnir_middle" + num_spectral_bands = 939 + num_bands_irradiance = 1024 + image_scanning_time = 540 + else: + camera_type = "vnir_new" + num_spectral_bands = 939 + num_bands_irradiance = 3648 + # it is obverved that it takes an average of 3.5 mins/scan = 210 seconds + image_scanning_time = 210 + else: + if date < "2019-02-26": # Note that no calibration models are available for old&middle swir data + camera_type = "swir_old_middle" + else: + camera_type = "swir_new" + num_spectral_bands = 275 + num_bands_irradiance = 512 + 
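# [Illustrative sketch, not part of the patch] The date-based branching around this point
# selects per-camera constants. A table-driven equivalent for the VNIR cases, built only
# from the values stated in this patch (hypothetical table and helper names):
VNIR_CONFIGS = [
    # (config applies from this date, camera_type, spectral bands, irradiance bands, scan seconds)
    ("0000-00-00", "vnir_old",    955, 1024, 540),
    ("2018-08-18", "vnir_middle", 939, 1024, 540),
    ("2019-02-26", "vnir_new",    939, 3648, 210),
]

def vnir_config_for(date):
    # dates are ISO-formatted strings (YYYY-MM-DD), so lexicographic comparison is chronological
    chosen = VNIR_CONFIGS[0]
    for cfg in VNIR_CONFIGS:
        if date >= cfg[0]:
            chosen = cfg
    return chosen[1:]
# e.g. vnir_config_for("2018-10-08") == ("vnir_middle", 939, 1024, 540)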
+            image_scanning_time = 210
+
+    print("MODE: ---------- %s ----------" % camera_type)
+
+    # load the raw data set
+    print("Loading %s.hdr" % raw_filepath)
+    try:
+        raw = envi.open(raw_filepath + '.hdr')
+        # img_DN = raw.load()
+        img_DN = raw.open_memmap()
+        # head_file = envi.read_envi_header(data_fullpath +'.hdr')
+    except IOError:
+        print('No such file named %s' % raw_filepath)
+
+    # Apply the calibration procedure if camera_type is vnir_old, vnir_middle, vnir_new or swir_new. Since no calibration
+    # models are available for swir_old and swir_middle, directly convert the old & middle SWIR raw data to netCDF format.
+    if camera_type == "swir_old_middle":
+        # Convert the raw swir_old and swir_middle data to netCDF
+        img_DN = np.rollaxis(img_DN, 2, 0)
+        # Generate the output path and call the netCDF conversion function
+        out_path = os.path.dirname(raw_filepath.replace("raw_data", "Level_1").replace("SWIR", "swir_netcdf").replace("VNIR", "vnir_netcdf"))
+        out_file = os.path.join(out_path, "%s_netcdf_L1_ua-mac_%s.nc" % (camera_type.split("_")[0], timestamp))
+        update_netcdf(out_file, img_DN, camera_type)
+
+        # free up memory
+        del img_DN
+
+    else:  # when camera_type is vnir_old, vnir_middle, vnir_new or swir_new, apply the pre-computed calibration models
+        # Load the previously created calibration models based on the camera_type
+        best_matched = os.path.join(raw_root + "/" + "calibration_new", camera_type, 'best_matched_index.npy')
+        bias = os.path.join(raw_root + "/" + "calibration_new", camera_type, 'bias_coeff.npy')
+        gain = os.path.join(raw_root + "/" + "calibration_new", camera_type, 'gain_coeff.npy')
+
+        # read EnvLog data
+        print("Reading EnvLog files in %s" % envlog_dir)
+        envlog_tot_time = []
+        envlog_spectra = np.array([], dtype=np.int64).reshape(0, num_bands_irradiance)
+        for ef in os.listdir(envlog_dir):
+            if ef.endswith("environmentlogger.json"):
+                print(ef)
+                time, spectrum = irradiance_time_extractor(camera_type, os.path.join(envlog_dir, ef))
+                envlog_tot_time += time
+                # print("concatenating %s onto %s" % (spectrum.shape, envlog_spectra.shape))
+                envlog_spectra = np.vstack([envlog_spectra, spectrum])
+
+        # Find the best-matching time range between the image time stamp and the EnvLog time stamps
+        num_irridiance_record = int(image_scanning_time/5)   # e.g. 210/5 = 42 records -----> 5 seconds per record
+
+        # concatenation of hours, minutes and seconds of the image time stamp (e.g., 12-38-49 to 123849)
+        with open(md_file) as json_file:
+            img_meta_data = json.load(json_file)
+        meta_data_time = img_meta_data['lemnatec_measurement_metadata']['gantry_system_variable_metadata']['time']
+        image_time = meta_data_time[-8:]
+        image_time = int(image_time.replace(":", ""))
+
+        # compute the absolute time difference between the image time stamp and every EnvLog record
+        print("Computing mean spectrum")
+        abs_diff_time = np.zeros((len(envlog_tot_time)))
+        for k in range(len(envlog_tot_time)):
+            abs_diff_time[k] = abs(image_time - envlog_tot_time[k])
+        ind_closet_time = np.argmin(abs_diff_time)   # closest time index
+        mean_spectrum = np.mean(envlog_spectra[ind_closet_time : ind_closet_time + num_irridiance_record-1, :], axis=0)
+
+        # load the pre-computed best-matched index between image and irradiance sensor spectral bands
+        best_matched_index = np.load(best_matched)
+        test_irridance = mean_spectrum[best_matched_index.astype(int).tolist()]
+        test_irridance_re = np.resize(test_irridance, (1, num_spectral_bands))
+
+        # load and apply the precomputed coefficients to convert irradiance to DN
+        b = np.load(bias)
+        g = np.load(gain)
+        if camera_type ==
"vnir_old": + test_irridance_re = test_irridance_re[:,0:679] + img_DN = img_DN[:,:,0:679] + if camera_type == "vnir_middle": + test_irridance_re = test_irridance_re[:,0:662] + img_DN = img_DN[:,:,0:662] + + irrad2DN = (g * test_irridance_re) + b + + # reflectance computation + print("Computing reflectance") + rfl_data = img_DN/irrad2DN + rfl_data = np.rollaxis(rfl_data, 2, 0) + + # free up memory + del img_DN + del irrad2DN + + # prepare output paths + print("Generating output") + out_path = os.path.dirname(raw_filepath.replace("raw_data", "Level_1").replace("SWIR", "swir_netcdf").replace("VNIR", "vnir_netcdf")) + if not os.path.isdir(out_path): + os.makedirs(out_path) + + # save as ENVI file (RGB bands: 392, 252, 127) + #out_file = os.path.join('ref_%s.hdr' % raw_file) + #envi.save_image(out_file, Ref, dtype=np.float32, interleave='bil', force = 'True', metadata=head_file) + + # Save Ref as a .npy file + #out_file = os.path.join(out_path, 'ref_%s.npy' % raw_file) + #np.save(out_file, rfl_data) + + # Write to nc file + out_file = os.path.join(out_path, "%s_netcdf_L1_ua-mac_%s.nc" % (camera_type.split("_")[0], timestamp)) + update_netcdf(out_file, rfl_data, camera_type) + + # free up memory + del rfl_data + +# TODO: This will come from the extractor message +input_paths = [ +# swir_old + # NA os.path.join(raw_root, "SWIR/2017-04-16/2017-04-16__11-50-46-707/c6079666-b686-4481-9a4f-0663f5f43a6a_raw"), +# swir_new + # OK os.path.join(raw_root, "SWIR/2018-09-22/2018-09-22__13-21-35-977/05b7ad1a-a2d7-4dfc-bcec-b1a394ec0892_raw"), + # OK os.path.join(raw_root, "SWIR/2018-10-11/2018-10-11__12-11-43-420/dc60c7d5-24bc-432b-a7cb-98ac1da73154_raw"), +# vnir_old + # OK os.path.join(raw_root, "VNIR/2017-04-15/2017-04-15__11-33-42-265/76efd15f-928a-49f7-a008-4877b8842129_raw"), + # OK os.path.join(raw_root, "VNIR/2017-04-17/2017-04-17__16-39-35-738/a5096c7a-c052-4728-a29b-a5a6119c366c_raw"), + # OK os.path.join(raw_root, "VNIR/2017-04-17/2017-07-08__06-30-15-622/5154c9fc-0a51-4a4d-9c79-242539f057ad_raw"), + # os.path.join(raw_root, "VNIR/2017-05-13/2017-05-13__12-00-39-756/1bcc7cd0-1205-45a3-b4b3-cbcac6236754_raw"), # Killed + # os.path.join(raw_root, "VNIR/2017-06-18/2017-06-18__14-34-24-390/41a0b327-83ff-4131-b1fd-5ee5254760b6_raw"), # Killed + # os.path.join(raw_root, "VNIR/2017-07-27/2017-07-27__15-05-11-667/d1643679-bef3-4179-9912-d63bf4cd53c6_raw"), + #os.path.join(raw_root, "VNIR/2017-08-23/2017-08-23__09-21-43-959/ea0e3408-ed1c-412d-aa68-e75ce2e902b1_raw"), + #os.path.join(raw_root, "VNIR/2018-09-26/2018-09-26__13-36-22-843/73e4642f-1ac7-430a-a00e-a04224bde9db_raw"), +# vnir_middle + # NA os.path.join(raw_root, "VNIR/2018-08-18/2018-08-18__11-11-41-890/c5f4d50f-44ad-4e23-9d92-f10e62110ac7_raw"), + # NA os.path.join(raw_root, "VNIR/2018-10-08/2018-10-08__11-41-01-365/5e39a30f-d343-405e-a140-db26dc72eb59_raw"), +# vnir_new + # GEN os.path.join(raw_root, "VNIR/2019-06-17/2019-06-17__14-03-29-760/d51b6f4c-9246-4da8-9a6c-786bb1dc21bf_raw"), +] + +for p in input_paths: + apply_calibration(p) + + + + + + + \ No newline at end of file diff --git a/hyperspectral/calibrate.py b/hyperspectral/calibrate.py index fce9d57..cdd257f 100644 --- a/hyperspectral/calibrate.py +++ b/hyperspectral/calibrate.py @@ -14,16 +14,18 @@ import json import numpy as np import os +import subprocess import spectral.io.envi as envi from netCDF4 import Dataset raw_root = "/home/extractor/sites/ua-mac/raw_data" -# raw_root = "/home/extractor/hs_calib" +calib_root = "/home/extractor" # extract spectral profiles from 
environmentlogger.json -def irradiance_time_extractor(envlog_file): +def irradiance_time_extractor(camera_type, envlog_file): # For the environmental logger records after 04/26/2016, there would be 24 files per day (1 file per hour, 5 seconds per record) + # Convert json fiel to dictionary format file with open(envlog_file, "r") as fp: lines = fp.readlines() slines = "".join(lines) @@ -32,9 +34,13 @@ def irradiance_time_extractor(envlog_file): # assume that time stamp follows in 5 second increments across records since 5 sec/record num_readings = len(js["environment_sensor_readings"]) if "spectrometers" in js["environment_sensor_readings"][0]: - num_bands = len(js["environment_sensor_readings"][0]["spectrometers"]["FLAME-T"]["spectrum"]) + if camera_type == "swir_new": + num_bands = len(js["environment_sensor_readings"][0]["spectrometers"]["NIRQuest-512"]["spectrum"]) + else: + num_bands = len(js["environment_sensor_readings"][0]["spectrometers"]["FLAME-T"]["spectrum"]) else: num_bands = len(js["environment_sensor_readings"][0]["spectrometer"]["spectrum"]) + spectra = np.zeros((num_readings, num_bands)) times = [] for idx in range(num_readings): @@ -47,15 +53,19 @@ def irradiance_time_extractor(envlog_file): # read spectrum from irridiance sensors if "spectrometers" in js["environment_sensor_readings"][idx]: - spectrum = js["environment_sensor_readings"][idx]["spectrometers"]["FLAME-T"]["spectrum"] + if camera_type == "swir_new": + spectrum = js["environment_sensor_readings"][0]["spectrometers"]["NIRQuest-512"]["spectrum"] + else: + spectrum = js["environment_sensor_readings"][idx]["spectrometers"]["FLAME-T"]["spectrum"] else: spectrum = js["environment_sensor_readings"][idx]["spectrometer"]["spectrum"] + spectra[idx,:] = spectrum return times, spectra # replace rfl_img variable in netcdf with given matrix -def update_netcdf(inp, rfl_data, subset_range=False): +def update_netcdf(inp, rfl_data, camera_type): print("Updating %s" % inp) out = inp.replace(".nc", "_newrfl.nc") @@ -85,12 +95,19 @@ def update_netcdf(inp, rfl_data, subset_range=False): print("...%s" % name) dst[name][:] = src[name][:] else: - if subset_range: + if camera_type=='vnir_old': print("...%s (subset)" % name) dst[name][:679,:,:] = rfl_data # 679-955 set to NaN print("...NaNs") dst[name][679:,:,:] = np.nan + + elif camera_type == "vnir_middle": + print("...%s (subset)" % name) + dst[name][:662,:,:] = rfl_data + # 679-955 set to NaN + print("...NaNs") + dst[name][662:,:,:] = np.nan else: print("...%s" % name) dst[name][:] = rfl_data @@ -115,6 +132,15 @@ def apply_calibration(raw_filepath): timestamp = raw_filepath.split("/")[-2] envlog_dir = os.path.join(raw_root, "EnvironmentLogger/%s" % date) + # prepare output paths + print("Generating output") + out_path = os.path.dirname(raw_filepath.replace("raw_data", "Level_1").replace("SWIR", "swir_netcdf").replace("VNIR", "vnir_netcdf")) + if not os.path.isdir(out_path): + os.makedirs(out_path) + if os.path.isfile(out_path): + print("Output file already exists: skipping "+out_path) + return + # determine type of sensor and age of camera if raw_filepath.find("VNIR") > -1: if date < "2018-08-18": @@ -123,11 +149,10 @@ def apply_calibration(raw_filepath): num_bands_irradiance = 1024 image_scanning_time = 540 elif "2018-08-18" <= date < "2019-02-26": - # TODO: This may need special handling - vnir_middle - camera_type = "vnir_new" + camera_type = "vnir_middle" num_spectral_bands = 939 num_bands_irradiance = 1024 - image_scanning_time = 210 + image_scanning_time = 540 else: 
camera_type = "vnir_new" num_spectral_bands = 939 @@ -135,127 +160,169 @@ def apply_calibration(raw_filepath): # it is obverved that it takes an average of 3.5 mins/scan = 210 seconds image_scanning_time = 210 else: - if date < "2018-08-17": - # swir_old does not have necessary input files - camera_type = "swir_new" - num_spectral_bands = 273 - num_bands_irradiance = 1024 #512 - image_scanning_time = 210 + if date < "2019-02-26": # Note that no calibration models are available for old&middle swir data + camera_type = "swir_old_middle" else: camera_type = "swir_new" num_spectral_bands = 275 - num_bands_irradiance = 1024 #512 + num_bands_irradiance = 512 image_scanning_time = 210 print("MODE: ---------- %s ----------" % camera_type) - best_matched = os.path.join("calibration_new", camera_type, 'best_matched_index.npy') - bias = os.path.join("calibration_new", camera_type, 'bias_coeff.npy') - gain = os.path.join("calibration_new", camera_type, 'gain_coeff.npy') - # load the raw data set print("Loading %s.hdr" % raw_filepath) try: raw = envi.open(raw_filepath +'.hdr') - #img_DN = raw.load() + # img_DN = raw.load() img_DN = raw.open_memmap() #head_file = envi.read_envi_header(data_fullpath +'.hdr') except IOError: print('No such file named %s' % raw_filepath) - # read EnvLog data - print("Reading EnvLog files in %s" % envlog_dir) - envlog_tot_time = [] - envlog_spectra = np.array([], dtype=np.int64).reshape(0, num_bands_irradiance) - for ef in os.listdir(envlog_dir): - if ef.endswith("environmentlogger.json"): - print(ef) - time, spectrum = irradiance_time_extractor(os.path.join(envlog_dir, ef)) - envlog_tot_time += time - # print("concatenating %s onto %s" % (spectrum.shape, envlog_spectra.shape)) - envlog_spectra = np.vstack([envlog_spectra, spectrum]) - - # Find the best match time range between image time stamp and EnvLog time stamp - num_irridiance_record = int(image_scanning_time/5) # 210/5=4.2 ----> 5 seconds per record - - # concatenation of hour mins and seconds of the image time stamp (eg., 12-38-49 to 123849) - with open(md_file) as json_file: - img_meta_data = json.load(json_file) - meta_data_time = img_meta_data['lemnatec_measurement_metadata']['gantry_system_variable_metadata']['time'] - image_time = meta_data_time[-8:] - image_time = int(image_time.replace(":","")) - - # computer the absolute difference between - print("Computing mean spectrum") - abs_diff_time = np.zeros((len(envlog_tot_time))) - for k in range(len(envlog_tot_time)): - abs_diff_time[k] = abs(image_time - envlog_tot_time[k]) - ind_closet_time = np.argmin(abs_diff_time) # closest time index - mean_spectrum = np.mean(envlog_spectra[ind_closet_time : ind_closet_time + num_irridiance_record-1, :], axis=0) - - # load pre-computed the best matched index between image and irradiance sensor spectral bands - best_matched_index = np.load(best_matched) - test_irridance = mean_spectrum[best_matched_index.astype(int).tolist()] - test_irridance_re = np.resize(test_irridance, (1, num_spectral_bands)) - - # load and apply precomputed coefficient to convert irradiance to DN - b = np.load(bias) - g = np.load(gain) - if camera_type == "vnir_old": - test_irridance_re = test_irridance_re[:,0:679] - img_DN = img_DN[:,:,0:679] - irrad2DN = (g * test_irridance_re) + b - - # reflectance computation - print("Computing reflectance") - rfl_data = img_DN/irrad2DN - rfl_data = np.rollaxis(rfl_data, 2, 0) - - # free up memory - del img_DN - del irrad2DN - - # prepare output paths - print("Generating output") - out_path = 
os.path.dirname(raw_filepath.replace("raw_data", "Level_1").replace("SWIR", "swir_netcdf").replace("VNIR", "vnir_netcdf")) - if not os.path.isdir(out_path): - os.makedirs(out_path) - - # save as ENVI file (RGB bands: 392, 252, 127) - #out_file = os.path.join('ref_%s.hdr' % raw_file) - #envi.save_image(out_file, Ref, dtype=np.float32, interleave='bil', force = 'True', metadata=head_file) - - # Save Ref as a .npy file - #out_file = os.path.join(out_path, 'ref_%s.npy' % raw_file) - #np.save(out_file, rfl_data) - - # Write to nc file - out_file = os.path.join(out_path, "%s_netcdf_L1_ua-mac_%s.nc" % (camera_type.split("_")[0], timestamp)) - update_netcdf(out_file, rfl_data, camera_type=="vnir_old") - - -# TODO: This will come from the extractor message -input_paths = [ -# swir_old - # NA os.path.join(raw_root, "SWIR/2017-04-16/2017-04-16__11-50-46-707/c6079666-b686-4481-9a4f-0663f5f43a6a_raw"), -# swir_new - # OK os.path.join(raw_root, "SWIR/2018-09-22/2018-09-22__13-21-35-977/05b7ad1a-a2d7-4dfc-bcec-b1a394ec0892_raw"), - # OK os.path.join(raw_root, "SWIR/2018-10-11/2018-10-11__12-11-43-420/dc60c7d5-24bc-432b-a7cb-98ac1da73154_raw"), -# vnir_old - # OK os.path.join(raw_root, "VNIR/2017-04-15/2017-04-15__11-33-42-265/76efd15f-928a-49f7-a008-4877b8842129_raw"), - # OK os.path.join(raw_root, "VNIR/2017-04-17/2017-04-17__16-39-35-738/a5096c7a-c052-4728-a29b-a5a6119c366c_raw"), - # OK os.path.join(raw_root, "VNIR/2017-04-17/2017-07-08__06-30-15-622/5154c9fc-0a51-4a4d-9c79-242539f057ad_raw"), - # os.path.join(raw_root, "VNIR/2017-05-13/2017-05-13__12-00-39-756/1bcc7cd0-1205-45a3-b4b3-cbcac6236754_raw"), # Killed - # os.path.join(raw_root, "VNIR/2017-06-18/2017-06-18__14-34-24-390/41a0b327-83ff-4131-b1fd-5ee5254760b6_raw"), # Killed - # os.path.join(raw_root, "VNIR/2017-07-27/2017-07-27__15-05-11-667/d1643679-bef3-4179-9912-d63bf4cd53c6_raw"), - os.path.join(raw_root, "VNIR/2017-08-23/2017-08-23__09-21-43-959/ea0e3408-ed1c-412d-aa68-e75ce2e902b1_raw"), -# vnir_middle - # NA os.path.join(raw_root, "VNIR/2018-08-18/2018-08-18__11-11-41-890/c5f4d50f-44ad-4e23-9d92-f10e62110ac7_raw"), - # NA os.path.join(raw_root, "VNIR/2018-10-08/2018-10-08__11-41-01-365/5e39a30f-d343-405e-a140-db26dc72eb59_raw"), -# vnir_new - # GEN os.path.join(raw_root, "VNIR/2019-06-17/2019-06-17__14-03-29-760/d51b6f4c-9246-4da8-9a6c-786bb1dc21bf_raw"), - -] - -for p in input_paths: - apply_calibration(p) + # Apply calibration procedure if camera_type == vnir_old, vnir_middle, vnir_new or swir_new. 
Since no calibration models are available for + # swir_old and swir_middle, so directly convert old&middel SWIR raw data to netcdf format + if camera_type =="swir_old_middle": + # Convert the raw swir_old and swir_middle data to netCDF + img_DN = np.rollaxis(img_DN, 2, 0) + # Generate output path and call the netCDF conversion function, convert the raw old&middle swir data to netcdf + out_path = os.path.dirname(raw_filepath.replace("raw_data", "Level_1").replace("SWIR", "swir_netcdf").replace("VNIR", "vnir_netcdf")) + out_file = os.path.join(out_path, "%s_netcdf_L1_ua-mac_%s.nc" % (camera_type.split("_")[0], timestamp)) + update_netcdf(out_file, img_DN, camera_type) + + # free up memory + del img_DN + + else: # when camera_type == vnir_old, vnir_middle, vnir_new or swir_new, apply pre-computed calibration models + # Load the previously created calibration models based on the camera_type + best_matched = os.path.join(calib_root + "/" + "calibration_new", camera_type, 'best_matched_index.npy') + bias = os.path.join(calib_root + "/" + "calibration_new", camera_type, 'bias_coeff.npy') + gain = os.path.join(calib_root + "/" + "calibration_new", camera_type, 'gain_coeff.npy') + # read EnvLog data + print("Reading EnvLog files in %s" % envlog_dir) + envlog_tot_time = [] + envlog_spectra = np.array([], dtype=np.int64).reshape(0, num_bands_irradiance) + for ef in os.listdir(envlog_dir): + if ef.endswith("environmentlogger.json"): + + time, spectrum = irradiance_time_extractor(camera_type,os.path.join(envlog_dir, ef)) + envlog_tot_time += time + # print("concatenating %s onto %s" % (spectrum.shape, envlog_spectra.shape)) + envlog_spectra = np.vstack([envlog_spectra, spectrum]) + + # Find the best match time range between image time stamp and EnvLog time stamp + num_irridiance_record = int(image_scanning_time/5) # 210/5=4.2 ----> 5 seconds per record + + # concatenation of hour mins and seconds of the image time stamp (eg., 12-38-49 to 123849) + with open(md_file) as json_file: + img_meta_data = json.load(json_file) + meta_data_time = img_meta_data['lemnatec_measurement_metadata']['gantry_system_variable_metadata']['time'] + image_time = meta_data_time[-8:] + image_time = int(image_time.replace(":","")) + + # compute the absolute difference between + print("Computing mean spectrum") + abs_diff_time = np.zeros((len(envlog_tot_time))) + for k in range(len(envlog_tot_time)): + abs_diff_time[k] = abs(image_time - envlog_tot_time[k]) + ind_closet_time = np.argmin(abs_diff_time) # closest time index + mean_spectrum = np.mean(envlog_spectra[ind_closet_time : ind_closet_time + num_irridiance_record-1, :], axis=0) + + # load pre-computed the best matched index between image and irradiance sensor spectral bands + best_matched_index = np.load(best_matched) + test_irridance = mean_spectrum[best_matched_index.astype(int).tolist()] + test_irridance_re = np.resize(test_irridance, (1, num_spectral_bands)) + + # load and apply precomputed coefficient to convert irradiance to DN + b = np.load(bias) + g = np.load(gain) + if camera_type == "vnir_old": + test_irridance_re = test_irridance_re[:,0:679] + img_DN = img_DN[:,:,0:679] + if camera_type == "vnir_middle": + test_irridance_re = test_irridance_re[:,0:662] + img_DN = img_DN[:,:,0:662] + + irrad2DN = (g * test_irridance_re) + b + + # reflectance computation + print("Computing reflectance") + rfl_data = img_DN/irrad2DN + rfl_data = np.rollaxis(rfl_data, 2, 0) + + # free up memory + del img_DN + del irrad2DN + + # save as ENVI file (RGB bands: 392, 252, 127) + 
#out_file = os.path.join('ref_%s.hdr' % raw_file) + #envi.save_image(out_file, Ref, dtype=np.float32, interleave='bil', force = 'True', metadata=head_file) + + # Save Ref as a .npy file + #out_file = os.path.join(out_path, 'ref_%s.npy' % raw_file) + #np.save(out_file, rfl_data) + + # Write to nc file + out_file = os.path.join(out_path, "%s_netcdf_L1_ua-mac_%s.nc" % (camera_type.split("_")[0], timestamp)) + update_netcdf(out_file, rfl_data, camera_type) + + # free up memory + del rfl_data + + +if __name__ == "__main__": + # TODO: This will come from the extractor message + input_paths = [ + # swir_old + # NA os.path.join(raw_root, "SWIR/2017-04-16/2017-04-16__11-50-46-707/c6079666-b686-4481-9a4f-0663f5f43a6a_raw"), + # swir_new + # OK os.path.join(raw_root, "SWIR/2018-09-22/2018-09-22__13-21-35-977/05b7ad1a-a2d7-4dfc-bcec-b1a394ec0892_raw"), + # OK os.path.join(raw_root, "SWIR/2018-10-11/2018-10-11__12-11-43-420/dc60c7d5-24bc-432b-a7cb-98ac1da73154_raw"), + # vnir_old + # OK os.path.join(raw_root, "VNIR/2017-04-15/2017-04-15__11-33-42-265/76efd15f-928a-49f7-a008-4877b8842129_raw"), + # OK os.path.join(raw_root, "VNIR/2017-04-17/2017-04-17__16-39-35-738/a5096c7a-c052-4728-a29b-a5a6119c366c_raw"), + # OK os.path.join(raw_root, "VNIR/2017-04-17/2017-07-08__06-30-15-622/5154c9fc-0a51-4a4d-9c79-242539f057ad_raw"), + # os.path.join(raw_root, "VNIR/2017-05-13/2017-05-13__12-00-39-756/1bcc7cd0-1205-45a3-b4b3-cbcac6236754_raw"), # Killed + # os.path.join(raw_root, "VNIR/2017-06-18/2017-06-18__14-34-24-390/41a0b327-83ff-4131-b1fd-5ee5254760b6_raw"), # Killed + # os.path.join(raw_root, "VNIR/2017-07-27/2017-07-27__15-05-11-667/d1643679-bef3-4179-9912-d63bf4cd53c6_raw"), + os.path.join(raw_root, "VNIR/2017-08-23/2017-08-23__09-21-43-959/ea0e3408-ed1c-412d-aa68-e75ce2e902b1_raw"), + # vnir_middle + # NA os.path.join(raw_root, "VNIR/2018-08-18/2018-08-18__11-11-41-890/c5f4d50f-44ad-4e23-9d92-f10e62110ac7_raw"), + # NA os.path.join(raw_root, "VNIR/2018-10-08/2018-10-08__11-41-01-365/5e39a30f-d343-405e-a140-db26dc72eb59_raw"), + # vnir_new + # GEN os.path.join(raw_root, "VNIR/2019-06-17/2019-06-17__14-03-29-760/d51b6f4c-9246-4da8-9a6c-786bb1dc21bf_raw"), + + ] + + for p in input_paths: + apply_calibration(p) + +for sensor in ["VNIR", "SWIR"]: + sensor_dir = os.path.join(raw_root, sensor) + dates = os.listdir(sensor_dir) + for d in dates: + if d.startswith("2018") or d.startswith("2017"): + date_dir = os.path.join(sensor_dir, d) + timestamps = os.listdir(date_dir) + for ts in timestamps: + ts_dir = os.path.join(date_dir, ts) + flist = os.listdir(ts_dir) + for f in flist: + if f.endswith("_raw"): + fpath = os.path.join(ts_dir, f) + rawsize = os.stat(fpath).st_size + if rawsize > 24 * 1000000000: + print("filesize %sGB exceeds available RAM" % int(rawsize/1000000000)) + else: + print("Generating .nc file") + date = fpath.split("/")[-3] + timestamp = fpath.split("/")[-2] + + out_path = os.path.dirname(fpath.replace("raw_data", "Level_1").replace("SWIR", "swir_netcdf").replace("VNIR", "vnir_netcdf")) + out_file = os.path.join(out_path, "%s_netcdf_L1_ua-mac_%s.nc" % (sensor.lower(), timestamp)) + xps_file = out_file.replace(".nc", "_xps.nc") + returncode = subprocess.call(["bash", "hyperspectral_workflow.sh", "-d", "1", "-h", + "--output_xps_img", xps_file, "-i", fpath, "-o", out_file]) + + print("Calibrating "+f) + apply_calibration(fpath) diff --git a/hyperspectral/calibration_new/swir_new/readme.txt b/hyperspectral/calibration_new/swir_new/readme.txt index 6d86069..95ff8a2 100755 --- 
a/hyperspectral/calibration_new/swir_new/readme.txt
+++ b/hyperspectral/calibration_new/swir_new/readme.txt
@@ -1,4 +1,4 @@
-New SWIR: Feb 26, 2019
+New SWIR: After Feb 26, 2019
 num_spectral_bands = 275        # the number of spectral bands in the new SWIR sensor
 num_irradiance_bands = 512      # the number of spectral bands in the downwelling irradiance sensor
 image_scanning_time = 210       # This is an approximate number and it is subject to change based on the camera scanning speed.
diff --git a/hyperspectral/calibration_new/vnir_middle/best_matched_index.npy b/hyperspectral/calibration_new/vnir_middle/best_matched_index.npy
new file mode 100755
index 0000000000000000000000000000000000000000..16ee1ce935aeb2354a2198a344cb793bf0021bd1
GIT binary patch
(binary best_matched_index.npy data omitted)

diff --git a/hyperspectral/calibration_new/vnir_middle/bias_coeff.npy b/hyperspectral/calibration_new/vnir_middle/bias_coeff.npy
new file mode 100755
index 0000000000000000000000000000000000000000..82c28921e4c24c470d69d03915ccd96e0b125afa
GIT binary patch
(binary bias_coeff.npy data omitted)
(remaining binary .npy data, including gain_coeff.npy, omitted)
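The three vnir_middle .npy files above are the calibration model that calibrate.py loads in apply_calibration(): best_matched_index.npy maps each camera band to its closest downwelling-irradiance band, while gain_coeff.npy and bias_coeff.npy convert the matched irradiance into the DN the camera would record under it. A condensed sketch of that step, with hypothetical function and variable names, assuming the mean downwelling spectrum has already been matched to the image timestamp and that gain/bias are stored at the trimmed band width as in calibrate.py:

import os
import numpy as np

def dn_to_reflectance(img_dn, mean_irradiance, model_dir, num_spectral_bands, keep_bands=None):
    # indices of the downwelling-irradiance bands that best match each camera band
    best_matched_index = np.load(os.path.join(model_dir, "best_matched_index.npy"))
    gain = np.load(os.path.join(model_dir, "gain_coeff.npy"))
    bias = np.load(os.path.join(model_dir, "bias_coeff.npy"))

    # pick the matched irradiance values and lay them out as a single row of camera bands
    irradiance = np.resize(mean_irradiance[best_matched_index.astype(int)], (1, num_spectral_bands))

    # vnir_old (679 bands) and vnir_middle (662 bands) only calibrate a leading subset
    if keep_bands is not None:
        irradiance = irradiance[:, :keep_bands]
        img_dn = img_dn[:, :, :keep_bands]

    # DN predicted under the measured irradiance, then reflectance = observed / predicted
    irrad2dn = gain * irradiance + bias
    return img_dn / irrad2dn

In calibrate.py the equivalent logic trims vnir_old to its first 679 bands and vnir_middle to its first 662 before the division, which is what keep_bands stands in for here.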
i max_gb * 1000000000:
+            self.log_skip(resource, "filesize %sGB exceeds available RAM" % int(rawsize/1000000000))
+            return False
+
+        raw_file = None
+        for fname in resource['local_paths']:
+            if fname.endswith('raw'):
+                raw_file = fname
+        if raw_file is None:
+            raise ValueError("could not locate raw files & metadata in processing")
+
+        # Perform actual processing
+        self.log_info(resource, 'invoking calibration.py on: %s' % raw_file)
+        apply_calibration(raw_file)
+        self.log_info(resource, '...done with %s' % raw_file)
+
+        self.end_message(resource)
+
+    def process_message_old(self, connector, host, secret_key, resource, parameters):
+        self.start_message(resource)
+
         # clean tmp directory from any potential failed previous runs
         flist = os.listdir("/tmp")
         for f in flist: