 from pyresample import geometry
 from satpy import CHUNK_SIZE
 from satpy.readers.file_handlers import BaseFileHandler
-from satpy.readers.utils import get_geostationary_angle_extent, np2str
+from satpy.readers.utils import get_geostationary_mask, np2str

 AHI_CHANNEL_NAMES = ("1", "2", "3", "4", "5",
                      "6", "7", "8", "9", "10",
@@ -317,126 +317,115 @@ def get_area_def(self, dsid):
         self.area = area
         return area

-    def geo_mask(self):
-        """Masking the space pixels from geometry info."""
-        cfac = np.uint32(self.proj_info['CFAC'])
-        lfac = np.uint32(self.proj_info['LFAC'])
-        coff = np.float32(self.proj_info['COFF'])
-        loff = np.float32(self.proj_info['LOFF'])
-        nlines = int(self.data_info['number_of_lines'])
-        ncols = int(self.data_info['number_of_columns'])
-
-        # count starts at 1
-        local_coff = 1
-        local_loff = (self.total_segments - self.segment_number) * nlines + 1
-
-        xmax, ymax = get_geostationary_angle_extent(self.area)
-
-        pixel_cmax = np.rad2deg(xmax) * cfac * 1.0 / 2 ** 16
-        pixel_lmax = np.rad2deg(ymax) * lfac * 1.0 / 2 ** 16
-
-        def ellipse(line, col):
-            return ((line / pixel_lmax) ** 2) + ((col / pixel_cmax) ** 2) <= 1
-
-        cols_idx = da.arange(-(coff - local_coff),
-                             ncols - (coff - local_coff),
-                             dtype=np.float, chunks=CHUNK_SIZE)
-        lines_idx = da.arange(nlines - (loff - local_loff),
-                              -(loff - local_loff),
-                              -1,
-                              dtype=np.float, chunks=CHUNK_SIZE)
-        return ellipse(lines_idx[:, None], cols_idx[None, :])
+    def _read_header(self, fp_):
+        """Read header"""
+        header = {}
+
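+        # Header blocks 1-5: basic, data, projection, navigation and calibration info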
+        header['block1'] = np.fromfile(
+            fp_, dtype=_BASIC_INFO_TYPE, count=1)
+        header["block2"] = np.fromfile(fp_, dtype=_DATA_INFO_TYPE, count=1)
+        header["block3"] = np.fromfile(fp_, dtype=_PROJ_INFO_TYPE, count=1)
+        header["block4"] = np.fromfile(fp_, dtype=_NAV_INFO_TYPE, count=1)
+        header["block5"] = np.fromfile(fp_, dtype=_CAL_INFO_TYPE, count=1)
+        logger.debug("Band number = " +
+                     str(header["block5"]['band_number'][0]))
+        logger.debug('Time_interval: %s - %s',
+                     str(self.start_time), str(self.end_time))
+        band_number = header["block5"]['band_number'][0]
+        if band_number < 7:
+            cal = np.fromfile(fp_, dtype=_VISCAL_INFO_TYPE, count=1)
+        else:
+            cal = np.fromfile(fp_, dtype=_IRCAL_INFO_TYPE, count=1)
+
+        header['calibration'] = cal
+
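+        # Blocks 6-8: inter-calibration, segment and navigation correction info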
+        header["block6"] = np.fromfile(
+            fp_, dtype=_INTER_CALIBRATION_INFO_TYPE, count=1)
+        header["block7"] = np.fromfile(
+            fp_, dtype=_SEGMENT_INFO_TYPE, count=1)
+        header["block8"] = np.fromfile(
+            fp_, dtype=_NAVIGATION_CORRECTION_INFO_TYPE, count=1)
+        # 8 The navigation corrections:
+        ncorrs = header["block8"]['numof_correction_info_data'][0]
+        dtype = np.dtype([
+            ("line_number_after_rotation", "<u2"),
+            ("shift_amount_for_column_direction", "f4"),
+            ("shift_amount_for_line_direction", "f4"),
+        ])
+        corrections = []
+        for i in range(ncorrs):
+            corrections.append(np.fromfile(fp_, dtype=dtype, count=1))
+        fp_.seek(40, 1)
+        header['navigation_corrections'] = corrections
+        header["block9"] = np.fromfile(fp_,
+                                       dtype=_OBS_TIME_INFO_TYPE,
+                                       count=1)
+        numobstimes = header["block9"]['number_of_observation_times'][0]
+
+        dtype = np.dtype([
+            ("line_number", "<u2"),
+            ("observation_time", "f8"),
+        ])
+        lines_and_times = []
+        for i in range(numobstimes):
+            lines_and_times.append(np.fromfile(fp_,
+                                               dtype=dtype,
+                                               count=1))
+        header['observation_time_information'] = lines_and_times
+        fp_.seek(40, 1)
+
+        header["block10"] = np.fromfile(fp_,
+                                        dtype=_ERROR_INFO_TYPE,
+                                        count=1)
+        dtype = np.dtype([
+            ("line_number", "<u2"),
+            ("numof_error_pixels_per_line", "<u2"),
+        ])
+        num_err_info_data = header["block10"][
+            'number_of_error_info_data'][0]
+        err_info_data = []
+        for i in range(num_err_info_data):
+            err_info_data.append(np.fromfile(fp_, dtype=dtype, count=1))
+        header['error_information_data'] = err_info_data
+        fp_.seek(40, 1)
+
+        np.fromfile(fp_, dtype=_SPARE_TYPE, count=1)
+
+        return header
+
+    def _read_data(self, fp_, header):
+        """Read data block"""
+        nlines = int(header["block2"]['number_of_lines'][0])
+        ncols = int(header["block2"]['number_of_columns'][0])
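+        # Memory-map the raw counts and wrap them in a chunked dask array
+        # so the segment is read lazily rather than loaded at once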
+        return da.from_array(np.memmap(self.filename, offset=fp_.tell(),
+                                       dtype='<u2', shape=(nlines, ncols), mode='r'),
+                             chunks=CHUNK_SIZE)
+
+    def _mask_invalid(self, data, header):
+        """Mask invalid data"""
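+        # Replace counts flagged as outside-scan or erroneous with NaN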
+        invalid = da.logical_or(data == header['block5']["count_value_outside_scan_pixels"][0],
+                                data == header['block5']["count_value_error_pixels"][0])
+        return da.where(invalid, np.float32(np.nan), data)
+
+    def _mask_space(self, data):
+        """Mask space pixels"""
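+        # Keep only pixels that fall on the Earth disc seen from geostationary orbit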
+        return data.where(get_geostationary_mask(self.area))

     def read_band(self, key, info):
         """Read the data."""
+        # Read data
         tic = datetime.now()
-        header = {}
         with open(self.filename, "rb") as fp_:
-
-            header['block1'] = np.fromfile(
-                fp_, dtype=_BASIC_INFO_TYPE, count=1)
-            header["block2"] = np.fromfile(fp_, dtype=_DATA_INFO_TYPE, count=1)
-            header["block3"] = np.fromfile(fp_, dtype=_PROJ_INFO_TYPE, count=1)
-            header["block4"] = np.fromfile(fp_, dtype=_NAV_INFO_TYPE, count=1)
-            header["block5"] = np.fromfile(fp_, dtype=_CAL_INFO_TYPE, count=1)
-            logger.debug("Band number = " +
-                         str(header["block5"]['band_number'][0]))
-            logger.debug('Time_interval: %s - %s',
-                         str(self.start_time), str(self.end_time))
-            band_number = header["block5"]['band_number'][0]
-            if band_number < 7:
-                cal = np.fromfile(fp_, dtype=_VISCAL_INFO_TYPE, count=1)
-            else:
-                cal = np.fromfile(fp_, dtype=_IRCAL_INFO_TYPE, count=1)
-
-            header['calibration'] = cal
-
-            header["block6"] = np.fromfile(
-                fp_, dtype=_INTER_CALIBRATION_INFO_TYPE, count=1)
-            header["block7"] = np.fromfile(
-                fp_, dtype=_SEGMENT_INFO_TYPE, count=1)
-            header["block8"] = np.fromfile(
-                fp_, dtype=_NAVIGATION_CORRECTION_INFO_TYPE, count=1)
-            # 8 The navigation corrections:
-            ncorrs = header["block8"]['numof_correction_info_data'][0]
-            dtype = np.dtype([
-                ("line_number_after_rotation", "<u2"),
-                ("shift_amount_for_column_direction", "f4"),
-                ("shift_amount_for_line_direction", "f4"),
-            ])
-            corrections = []
-            for i in range(ncorrs):
-                corrections.append(np.fromfile(fp_, dtype=dtype, count=1))
-            fp_.seek(40, 1)
-            header['navigation_corrections'] = corrections
-            header["block9"] = np.fromfile(fp_,
-                                           dtype=_OBS_TIME_INFO_TYPE,
-                                           count=1)
-            numobstimes = header["block9"]['number_of_observation_times'][0]
-
-            dtype = np.dtype([
-                ("line_number", "<u2"),
-                ("observation_time", "f8"),
-            ])
-            lines_and_times = []
-            for i in range(numobstimes):
-                lines_and_times.append(np.fromfile(fp_,
-                                                   dtype=dtype,
-                                                   count=1))
-            header['observation_time_information'] = lines_and_times
-            fp_.seek(40, 1)
-
-            header["block10"] = np.fromfile(fp_,
-                                            dtype=_ERROR_INFO_TYPE,
-                                            count=1)
-            dtype = np.dtype([
-                ("line_number", "<u2"),
-                ("numof_error_pixels_per_line", "<u2"),
-            ])
-            num_err_info_data = header["block10"][
-                'number_of_error_info_data'][0]
-            err_info_data = []
-            for i in range(num_err_info_data):
-                err_info_data.append(np.fromfile(fp_, dtype=dtype, count=1))
-            header['error_information_data'] = err_info_data
-            fp_.seek(40, 1)
-
-            np.fromfile(fp_, dtype=_SPARE_TYPE, count=1)
-
-            nlines = int(header["block2"]['number_of_lines'][0])
-            ncols = int(header["block2"]['number_of_columns'][0])
-
-            res = da.from_array(np.memmap(self.filename, offset=fp_.tell(),
-                                          dtype='<u2', shape=(nlines, ncols), mode='r'),
-                                chunks=CHUNK_SIZE)
-
-            invalid = da.logical_or(res == header['block5']["count_value_outside_scan_pixels"][0],
-                                    res == header['block5']["count_value_error_pixels"][0])
-            res = da.where(invalid, np.float32(np.nan), res)
+            header = self._read_header(fp_)
+            res = self._read_data(fp_, header)
+            res = self._mask_invalid(data=res, header=header)
         self._header = header
-
         logger.debug("Reading time " + str(datetime.now() - tic))
+
+        # Calibrate
         res = self.calibrate(res, key.calibration)
+
+        # Update metadata
         new_info = dict(units=info['units'],
                         standard_name=info['standard_name'],
                         wavelength=info['wavelength'],
@@ -453,7 +442,10 @@ def read_band(self, key, info):
                         satellite_altitude=float(self.nav_info['distance_earth_center_to_satellite'] -
                                                  self.proj_info['earth_equatorial_radius']) * 1000)
         res = xr.DataArray(res, attrs=new_info, dims=['y', 'x'])
-        res = res.where(self.geo_mask())
+
+        # Mask space pixels
+        res = self._mask_space(res)
+
         return res

     def calibrate(self, data, calibration):