# JPSSD.py — JPSSData.git repository
# (blob 2e985cfe8455d33b1f86bf8a0077ee3856607da9; commit message: "adding requests to conda install docs")
1 import warnings
2 warnings.filterwarnings("ignore")
3 import numpy as np
4 import json
5 import requests
6 import urlparse
7 import os
8 import sys
9 import re
10 import glob
11 import netCDF4 as nc
12 from pyhdf.SD import SD, SDC
13 from utils import Dict
14 import scipy.io as sio
15 import h5py
16 import datetime
17 import time
18 import matplotlib.colors as colors
19 from itertools import groupby
20 from subprocess import check_output, call
def search_api(sname,bbox,time,maxg=50,platform="",version=""):
    """
    API search of the different satellite granules

    :param sname: short name
    :param bbox: polygon with the search bounding box
    :param time: time interval (init_time,final_time)
    :param maxg: max number of granules to process
    :param platform: string with the platform
    :param version: string with the version
    :return granules: list with the metadata of the search (empty if over the limit)

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    from cmr import GranuleQuery
    api = GranuleQuery()
    # Build the query once instead of branching on every combination of the
    # optional platform/version arguments (the original had four duplicated
    # api.parameters(...) calls).
    params = dict(short_name=sname, downloadable=True, polygon=bbox, temporal=time)
    if platform:
        params['platform'] = platform
    if version:
        params['version'] = version
    search = api.parameters(**params)
    sh = search.hits()
    print("%s gets %s hits in this range" % (sname, sh))
    if sh > maxg:
        # Too many granules to download: make the caller narrow the search.
        print("The number of hits %s is larger than the limit %s." % (sh, maxg))
        print("Use a reduced bounding box or a reduced time interval.")
        granules = []
    else:
        granules = api.get(sh)
    return granules
def search_archive(url,prod,time,grans):
    """
    Archive search of the different satellite granules

    :param url: base url of the archive domain
    :param prod: string of product with version, for instance: '5000/VNP09'
    :param time: time interval (init_time,final_time)
    :param grans: granules of the geolocation metadata
    :return granules: list with the metadata of the search

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-01-03
    """
    ids=['.'.join(k['producer_granule_id'].split('.')[1:3]) for k in grans] # satellite ids in bounding box
    granules=[]
    dts=[datetime.datetime.strptime(d,'%Y-%m-%dT%H:%M:%SZ') for d in time]
    delta=dts[1]-dts[0]
    nh=int(delta.total_seconds()/3600)
    # One archive folder per whole hour in the interval, as year/julian-day
    dates=[dts[0]+datetime.timedelta(seconds=3600*k) for k in range(1,nh+1)]
    fold=np.unique(['%d/%03d' % (date.timetuple().tm_year,date.timetuple().tm_yday) for date in dates])
    urls=[url+'/'+prod+'/'+f for f in fold]
    for u in urls:
        try:
            # The archive exposes a JSON listing of each daily folder
            js=requests.get(u+'.json').json()
            for j in js:
                # Keep only archive files whose acquisition id matches a
                # geolocation granule already found in the bounding box
                arg=np.argwhere(np.array(ids)=='.'.join(j['name'].split('.')[1:3]))
                if arg.size:
                    ar=arg[0][0]
                    g=Dict(j)
                    g.links=[{'href':u+'/'+g.name}]
                    g.time_start=grans[ar]["time_start"]
                    g.time_end=grans[ar]["time_end"]
                    g.dataset_id='Archive download: unknown dataset id'
                    g.producer_granule_id=j['name']
                    granules.append(g)
        except Exception as e:
            # BUG FIX: this used to be a bare string expression (a no-op), so
            # failed requests were silently ignored; report them instead.
            print("warning: JSON request to %s failed with error %s" % (u, e))
    print("%s gets %s hits in this range" % (prod.split('/')[-1],len(granules)))
    return granules
def get_meta(bbox,time,maxg=50,burned=False,high=False):
    """
    Get all the meta data from the API for all the necessary products

    :param bbox: polygon with the search bounding box
    :param time: time interval (init_time,final_time)
    :param maxg: max number of granules to process
    :param burned: if True, also search VNP09 reflectance granules (burned scar algorithm)
    :param high: if True, also search the 375m high-resolution VIIRS granules
    :return granules: dictionary with the metadata of all the products

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    granules=Dict([])
    #MOD14: MODIS Terra fire data
    granules.MOD14=search_api("MOD14",bbox,time,maxg,"Terra")
    #MOD03: MODIS Terra geolocation data
    granules.MOD03=search_api("MOD03",bbox,time,maxg,"Terra")
    #MOD09: MODIS Atmospherically corrected surface reflectance
    #granules.MOD09=search_api("MOD09",bbox,time,maxg,"Terra")
    #MYD14: MODIS Aqua fire data
    granules.MYD14=search_api("MYD14",bbox,time,maxg,"Aqua")
    #MYD03: MODIS Aqua geolocation data
    granules.MYD03=search_api("MYD03",bbox,time,maxg,"Aqua")
    #MYD09: MODIS Atmospherically corrected surface reflectance
    #granules.MYD09=search_api("MYD09",bbox,time,maxg,"Aqua")
    #VNP14: VIIRS fire data, res 750m
    granules.VNP14=search_api("VNP14",bbox,time,maxg)
    #VNP03MODLL: VIIRS geolocation data, res 750m
    granules.VNP03=search_api("VNP03MODLL",bbox,time,maxg)
    #VNP14HR: VIIRS fire data, res 375m
    #granules.VNP14HR=search_api("VNP14IMGTDL_NRT",bbox,time,maxg) # any results in API
    if high:
        # The high-resolution granules are not in the API; search the LAADS archive
        url="https://ladsweb.modaps.eosdis.nasa.gov/archive/allData" # base url
        prod="5000/NPP_IMFTS_L1"
        granules.VNP03HR=search_archive(url,prod,time,granules.VNP03)
        prod="5000/VNP14IMG" # product
        granules.VNP14HR=search_archive(url,prod,time,granules.VNP03HR)
    if burned:
        #VNP09: VIIRS Atmospherically corrected surface reflectance
        url="https://ladsweb.modaps.eosdis.nasa.gov/archive/allData" # base url
        prod="5000/VNP09" # product
        granules.VNP09=search_archive(url,prod,time,granules.VNP03)
    return granules
def group_files(path,reg):
    """
    Agrupate the geolocation (03) and fire (14) files of a specific product in a path

    :param path: path to the geolocation (03) and fire (14) files
    :param reg: string with the first 3 characters specifying the product (MOD, MYD or VNP)
    :return files: list of pairs with geolocation (03) and fire (14) file names in the path of the specific product

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    # Start from the geolocation (03) files; fire (14) and reflectance (09)
    # files are attached to the matching geolocation entry.
    files=[Dict({'geo':k}) for k in glob.glob(path+'/'+reg+'03*')]

    def _attach(candidates, attr):
        # Attach each candidate to the geolocation file that shares the second
        # character of the product name (e.g. MOD03 <-> MOD14) and the same
        # acquisition date/time fields (parts 1 and 2 of the dotted name).
        for f in candidates:
            m = os.path.basename(f).split('.')
            for k, g in enumerate(files):
                mm = os.path.basename(g.geo).split('.')
                if mm[0][1] == m[0][1] and mm[1:3] == m[1:3]:
                    setattr(files[k], attr, f)

    _attach(glob.glob(path+'/'+reg+'14*'), 'fire')
    _attach(glob.glob(path+'/'+reg+'09*'), 'ref')
    return files
def group_all(path):
    """
    Combine all the geolocation (03) and fire (14) files in a path

    :param path: path to the geolocation (03) and fire (14) files
    :return files: dictionary of products with a list of pairs with geolocation (03) and fire (14) file names in the path

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    files=Dict({})
    files.MOD=group_files(path,'MOD')   # MODIS Terra files
    files.MYD=group_files(path,'MYD')   # MODIS Aqua files
    files.VNP=group_files(path,'VNP')   # VIIRS files
    return files
def read_modis_files(files,bounds):
    """
    Read the geolocation (03) and fire (14) files for MODIS products (MOD or MYD)

    :param files: pair with geolocation (03) and fire (14) file names for MODIS products (MOD or MYD)
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    ret=Dict([])
    # Satellite information
    N=1354 # Number of columns (maxim number of sample)
    h=705. # Altitude of the satellite in km
    p=1. # Nadir pixel resolution in km
    # Reading MODIS files (both HDF4)
    hdfg=SD(files.geo,SDC.READ)
    hdff=SD(files.fire,SDC.READ)
    # Creating all the objects
    lat_obj=hdfg.select('Latitude')
    lon_obj=hdfg.select('Longitude')
    fire_obj=hdff.select('fire mask')
    lat_fire_obj=hdff.select('FP_latitude')
    lon_fire_obj=hdff.select('FP_longitude')
    brig_fire_obj=hdff.select('FP_T21')
    sample_fire_obj=hdff.select('FP_sample')
    conf_fire_obj=hdff.select('FP_confidence')
    t31_fire_obj=hdff.select('FP_T31')
    frp_fire_obj=hdff.select('FP_power')

    def _get(obj, mask=None):
        # Granules with no fire detections have empty FP_* datasets and
        # .get() raises; fall back to an empty array in that case.
        # BUG FIX: narrowed from bare `except:` clauses, which also swallowed
        # KeyboardInterrupt and SystemExit.
        try:
            values = obj.get()
            return values if mask is None else values[mask]
        except Exception:
            return np.array([])

    # Geolocation and mask information
    ret.lat=lat_obj.get()
    ret.lon=lon_obj.get()
    ret.fire=fire_obj.get()
    # Fire detected information, restricted to the bounding box
    flats=_get(lat_fire_obj)
    flons=_get(lon_fire_obj)
    fll=np.logical_and(np.logical_and(np.logical_and(flons >= bounds[0], flons <= bounds[1]), flats >= bounds[2]), flats <= bounds[3])
    ret.lat_fire=flats[fll]
    ret.lon_fire=flons[fll]
    ret.brig_fire=_get(brig_fire_obj,fll)
    ret.sat_fire=hdff.Satellite
    ret.conf_fire=_get(conf_fire_obj,fll)
    ret.t31_fire=_get(t31_fire_obj,fll)
    ret.frp_fire=_get(frp_fire_obj,fll)
    sf=_get(sample_fire_obj,fll)
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p)
    # No fire data: mask values 3 and 5 (non-fire pixels per the MOD14 user
    # guide — confirm against the product documentation) inside the box
    lats=np.ravel(ret.lat)
    lons=np.ravel(ret.lon)
    ll=np.logical_and(np.logical_and(np.logical_and(lons >= bounds[0], lons <= bounds[1]), lats >= bounds[2]), lats <= bounds[3])
    lats=lats[ll]
    lons=lons[ll]
    fire=np.ravel(ret.fire)
    fire=fire[ll]
    nf=np.logical_or(fire == 3, fire == 5)
    ret.lat_nofire=lats[nf]
    ret.lon_nofire=lons[nf]
    # Column (sample) index of every pixel, used to derive pixel dimensions
    sample=np.array([range(0,ret.lat.shape[1])]*ret.lat.shape[0])
    sample=np.ravel(sample)
    sample=sample[ll]
    sfn=sample[nf]
    ret.scan_angle_nofire,ret.scan_nofire,ret.track_nofire=pixel_dim(sfn,N,h,p)
    # Close files
    hdfg.end()
    hdff.end()
    return ret
def read_viirs_files(files,bounds):
    """
    Read the geolocation (03) and fire (14) files for VIIRS products (VNP)

    :param files: pair with geolocation (03) and fire (14) file names for VIIRS products (VNP)
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    ret=Dict([])
    # Satellite information
    N=3200 # Number of columns (maxim number of sample)
    h=828. # Altitude of the satellite in km
    # NOTE(review): presumably the scan-angle bounds (radians) of the three
    # VIIRS aggregation zones, consumed by pixel_dim — confirm
    alpha=np.array([0,31.59,44.68,56.06])/180*np.pi
    #p=(0.75+0.75/2+0.75/3)/3 # Nadir pixel resolution in km (mean in 3 different sections)
    # Nadir pixel resolution in km, one value per aggregation zone
    p=np.array([0.75,0.75/2,0.75/3])
    # Reading VIIRS files: geolocation is HDF5, fire product is netCDF
    h5g=h5py.File(files.geo,'r')
    ncf=nc.Dataset(files.fire,'r')
    # Geolocation and mask information
    ret.lat=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Latitude'])
    ret.lon=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Longitude'])
    ret.fire=np.array(ncf.variables['fire mask'][:])
    # Fire detected information
    flats=np.array(ncf.variables['FP_latitude'][:])
    flons=np.array(ncf.variables['FP_longitude'][:])
    # Keep only the fire pixels inside the requested bounding box
    fll=np.logical_and(np.logical_and(np.logical_and(flons >= bounds[0], flons <= bounds[1]), flats >= bounds[2]),flats <= bounds[3])
    ret.lat_fire=flats[fll]
    ret.lon_fire=flons[fll]
    ret.brig_fire=np.array(ncf.variables['FP_T13'][:])[fll]
    ret.sat_fire=ncf.SatelliteInstrument
    ret.conf_fire=np.array(ncf.variables['FP_confidence'][:])[fll]
    ret.t31_fire=np.array(ncf.variables['FP_T15'][:])[fll]
    ret.frp_fire=np.array(ncf.variables['FP_power'][:])[fll]
    sf=np.array(ncf.variables['FP_sample'][:])[fll]
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p,alpha)
    # No fire data: pixels whose mask value is 3 or 5 inside the bounding box
    lats=np.ravel(ret.lat)
    lons=np.ravel(ret.lon)
    ll=np.logical_and(np.logical_and(np.logical_and(lons >= bounds[0], lons <= bounds[1]), lats >= bounds[2]), lats <= bounds[3])
    lats=lats[ll]
    lons=lons[ll]
    fire=np.ravel(ret.fire)
    fire=fire[ll]
    nf=np.logical_or(fire == 3, fire == 5)
    ret.lat_nofire=lats[nf]
    ret.lon_nofire=lons[nf]
    # Column (sample) index of every pixel, needed to derive pixel dimensions
    sample=np.array([range(0,ret.lat.shape[1])]*ret.lat.shape[0])
    sample=np.ravel(sample)
    sample=sample[ll]
    sfn=sample[nf]
    ret.scan_angle_nofire,ret.scan_nofire,ret.track_nofire=pixel_dim(sfn,N,h,p,alpha)
    # Reflectance data for burned scar algorithm
    if 'ref' in files.keys():
        # Read reflectance data
        hdf=SD(files.ref,SDC.READ)
        # Bands data
        M7=hdf.select('750m Surface Reflectance Band M7') # 0.86 nm
        M8=hdf.select('750m Surface Reflectance Band M8') # 1.24 nm
        M10=hdf.select('750m Surface Reflectance Band M10') # 1.61 nm
        M11=hdf.select('750m Surface Reflectance Band M11') # 2.25 nm
        bands=Dict({})
        # NOTE(review): 1e-4 is presumably the VNP09 scale factor converting
        # stored integers to reflectance units — confirm against the spec
        bands.M7=M7.get()*1e-4
        bands.M8=M8.get()*1e-4
        bands.M10=M10.get()*1e-4
        bands.M11=M11.get()*1e-4
        # Burned scar mask using the burned scar granule algorithm
        ret.burned=burned_algorithm(bands)
        # Close reflectance file
        hdf.end()
    # Close files
    h5g.close()
    ncf.close()
    return ret
def read_viirs375_files(path,bounds):
    """
    Read the geolocation and fire information from VIIRS CSV files (fire_archive_*.csv and/or fire_nrt_*.csv)

    :param path: path where the CSV files are located
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read
        (one entry per day of detections), or an empty dict if no CSV found

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-10-23
    """
    import pandas as pd
    # Opening files if they exist
    f1=glob.glob(path+'/fire_archive_*.csv')
    f2=glob.glob(path+'/fire_nrt_*.csv')
    if len(f1)==1:
        df1=pd.read_csv(f1[0])
        if len(f2)==1:
            # Both archive and near-real-time files present: combine them
            df2=pd.read_csv(f2[0])
            dfs=pd.concat([df1,df2],sort=True,ignore_index=True)
        else:
            dfs=df1
    else:
        if len(f2)==1:
            dfs=pd.read_csv(f2[0])
        else:
            # Neither CSV file found: nothing to read
            return {}
    ret=Dict({})
    # In the case something exists, read all the information from the CSV files
    dfs=dfs[(dfs['longitude']>bounds[0]) & (dfs['longitude']<bounds[1]) & (dfs['latitude']>bounds[2]) & (dfs['latitude']<bounds[3])]
    date=np.array(dfs['acq_date'])
    time=np.array(dfs['acq_time'])
    # Combine the acquisition date and time columns into a single timestamp
    dfs['time']=np.array(['%s_%04d' % (date[k],time[k]) for k in range(len(date))])
    dfs['time']=pd.to_datetime(dfs['time'], format='%Y-%m-%d_%H%M')
    dfs['datetime']=dfs['time']
    dfs=dfs.set_index('time')
    # Group detections by day; each day becomes one granule-like entry
    # NOTE(review): pd.TimeGrouper is removed in modern pandas — use
    # pd.Grouper(freq="D") when migrating
    for group_name, df in dfs.groupby(pd.TimeGrouper("D")):
        items=Dict([])
        items.lat=np.array(df['latitude'])
        items.lon=np.array(df['longitude'])
        conf=np.array(df['confidence'])
        firemask=np.zeros(conf.shape)
        conf_fire=np.zeros(conf.shape)
        # Map the categorical confidence (l/n/h) to fire mask categories
        # 7/8/9 and to numerical confidences 30/60/90
        firemask[conf=='l']=7
        conf_fire[conf=='l']=30.
        firemask[conf=='n']=8
        conf_fire[conf=='n']=60.
        firemask[conf=='h']=9
        conf_fire[conf=='h']=90.
        items.fire=firemask.astype(int)
        # Every row of the CSV is a fire detection
        items.lat_fire=items.lat
        items.lon_fire=items.lon
        items.brig_fire=np.array(df['bright_ti4'])
        items.sat_fire='Suomi NPP'
        items.conf_fire=conf_fire
        items.t31_fire=np.array(df['bright_ti5'])
        items.frp_fire=np.array(df['frp'])
        items.scan_fire=np.array(df['scan'])
        items.track_fire=np.array(df['track'])
        # Scan angle is not provided in the CSV product
        items.scan_angle_fire=np.ones(items.scan_fire.shape)*np.nan
        # No non-fire (ground) information is available in the CSV product
        items.lat_nofire=np.array([])
        items.lon_nofire=np.array([])
        items.scan_angle_nofire=np.array([])
        items.scan_nofire=np.array([])
        items.track_nofire=np.array([])
        items.instrument=df['instrument'][0]
        # Use the first detection of the day as the representative time
        dt=df['datetime'][0]
        items.time_start_geo_iso='%02d-%02d-%02dT%02d:%02d:%02dZ' % (dt.year,dt.month,dt.day,dt.hour,dt.minute,dt.second)
        items.time_num=time_iso2num(items.time_start_geo_iso)
        items.acq_date='%02d-%02d-%02d' % (dt.year,dt.month,dt.day)
        items.acq_time='%02d%02d' % (dt.hour,dt.minute)
        items.time_start_fire_iso=items.time_start_geo_iso
        items.time_end_geo_iso=items.time_start_geo_iso
        items.time_end_fire_iso=items.time_start_geo_iso
        # Both CSV file lists concatenated serve as the "source files"
        items.file_geo=f1+f2
        items.file_fire=items.file_geo
        tt=df['datetime'][0].timetuple()
        # Build a granule-style id: VNPH_A<year><julian-day>_<hour><minute>
        id='VNPH_A%04d%03d_%02d%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour,tt.tm_min)
        items.prefix='VNPH'
        items.name='A%04d%03d_%02d%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour,tt.tm_min)
        ret.update({id: items})
    return ret
def read_goes_files(files):
    """
    Read the files for GOES products - geolocation and fire data already included

    :param files: pair with geolocation and fire file names for GOES products
    :return ret: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on WINDOWS10.
    Lauren Hearn (lauren@robotlauren.com), 2018-10-16
    """
    h5g=h5py.File(files.geo,'r')
    ret=Dict([])
    ret.lat=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Latitude'])
    ret.lon=np.array(h5g['HDFEOS']['SWATHS']['VNP_750M_GEOLOCATION']['Geolocation Fields']['Longitude'])
    ncf=nc.Dataset(files.fire,'r')
    ret.fire=np.array(ncf.variables['fire mask'][:])
    ret.lat_fire=np.array(ncf.variables['FP_latitude'][:])
    ret.lon_fire=np.array(ncf.variables['FP_longitude'][:])
    ret.brig_fire=np.array(ncf.variables['FP_T13'][:])
    sf=np.array(ncf.variables['FP_sample'][:])
    # Satellite information
    N=2500 # Number of columns (maxim number of sample)
    h=35786. # Altitude of the satellite in km (geostationary orbit)
    p=2. # Nadir pixel resolution in km
    # BUG FIX: pixel_dim returns three values (scan angle, scan, track), as
    # unpacked everywhere else in this module; the original two-name unpacking
    # raised a ValueError at runtime.
    ret.scan_angle_fire,ret.scan_fire,ret.track_fire=pixel_dim(sf,N,h,p)
    ret.sat_fire=ncf.SatelliteInstrument
    ret.conf_fire=np.array(ncf.variables['FP_confidence'][:])
    ret.t31_fire=np.array(ncf.variables['FP_T15'][:])
    ret.frp_fire=np.array(ncf.variables['FP_power'][:])
    return ret
def read_data(files,file_metadata,bounds):
    """
    Read all the geolocation (03) and fire (14) files and if necessary, the reflectance (09) files

    MODIS file names according to https://lpdaac.usgs.gov/sites/default/files/public/product_documentation/archive/mod14_v5_user_guide.pdf
        MOD14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.hdf
        MYD14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.hdf
    Where:
        YYYYDDD = year and Julian day (001-366) of data acquisition
        HHMM = hour and minute of data acquisition (approximate beginning time)
        vvv = version number
        yyyyddd = year and Julian day of data processing
        hhmmss = hour, minute, and second of data processing

    VIIRS file names according to https://lpdaac.usgs.gov/sites/default/files/public/product_documentation/vnp14_user_guide_v1.3.pdf
        VNP14IMG.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.nc
        VNP14.AYYYYDDD.HHMM.vvv.yyyydddhhmmss.nc
    Where:
        YYYYDDD = year and Julian day (001-366) of data acquisition
        HHMM = hour and minute of data acquisition (approximate beginning time)
        vvv = version number
        yyyyddd = year and Julian day of data processing
        hhmmss = hour, minute, and second of data processing

    :param files: list of products with a list of pairs with geolocation (03) and fire (14) file names in the path,
        or the string 'VIIRS375' to read CSV detections from the current directory
    :param file_metadata: dictionary with file names as key and granules metadata as values
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return data: dictionary with Latitude, Longitude and fire mask arrays read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    print "read_data files=%s" % files
    data=Dict([])
    if files=='VIIRS375':
        # Special case: 375m VIIRS detections come from CSV files, not granules
        data.update(read_viirs375_files('.',bounds))
    else:
        for f in files:
            print "read_data f=%s" % f
            # A pair must have both a geolocation and a fire file to be usable
            if 'geo' in f.keys():
                f0=os.path.basename(f.geo)
            else:
                print 'ERROR: read_data cannot read files=%s, not geo file' % f
                continue
            if 'fire' in f.keys():
                f1=os.path.basename(f.fire)
            else:
                print 'ERROR: read_data cannot read files=%s, not fire file' % f
                continue
            # boo tracks whether the metadata check can pass; a reflectance
            # file additionally requires its own metadata entry (checked below)
            boo=True
            if 'ref' in f.keys():
                f2=os.path.basename(f.ref)
                boo=False
            prefix = f0[:3]
            print 'prefix %s' % prefix
            if prefix != f1[:3]:
                print 'ERROR: the prefix of %s %s must coincide' % (f0,f1)
                continue
            # Granule key from the file name: acquisition day + time fields
            m=f.geo.split('/')
            mm=m[-1].split('.')
            key=mm[1]+'_'+mm[2]
            id = prefix + '_' + key
            print "id " + id
            # Dispatch to the product-specific reader
            if prefix=="MOD" or prefix=="MYD":
                try:
                    item=read_modis_files(f,bounds)
                    item.instrument="MODIS"
                except Exception as e:
                    print 'WARNING: reading the files from MODIS failed with error %s' % e
                    continue
            elif prefix=="VNP":
                try:
                    item=read_viirs_files(f,bounds)
                    item.instrument="VIIRS"
                except Exception as e:
                    print 'WARNING: reading the files from VIIRS failed with error %s' % e
                    continue
            elif prefix=="OR":
                try:
                    item=read_goes_files(f)
                    item.instrument="GOES"
                except Exception as e:
                    print 'WARNING: reading the files from GOES failed with error %s' % e
                    continue
            else:
                print 'ERROR: the prefix of %s %s must be MOD, MYD, or VNP' % (f0,f1)
                continue
            # A reflectance file is only accepted if it has metadata too
            if not boo:
                if f2 in file_metadata.keys():
                    boo=True
            if (f0 in file_metadata.keys()) and (f1 in file_metadata.keys()) and boo:
                # connect the file back to metadata
                item.time_start_geo_iso=file_metadata[f0]["time_start"]
                item.time_num=time_iso2num(item.time_start_geo_iso)
                dt=datetime.datetime.strptime(item.time_start_geo_iso[0:18],'%Y-%m-%dT%H:%M:%S')
                item.acq_date='%02d-%02d-%02d' % (dt.year,dt.month,dt.day)
                item.acq_time='%02d%02d' % (dt.hour,dt.minute)
                item.time_start_fire_iso=file_metadata[f1]["time_start"]
                item.time_end_geo_iso=file_metadata[f0]["time_end"]
                item.time_end_fire_iso=file_metadata[f1]["time_end"]
                item.file_geo=f0
                item.file_fire=f1
                if 'ref' in f.keys():
                    item.file_ref=f2
                    item.time_start_ref_iso=file_metadata[f2]["time_start"]
                    item.time_end_ref_iso=file_metadata[f2]["time_end"]
                item.prefix=prefix
                item.name=key
                data.update({id:item})
            else:
                print 'WARNING: file %s or %s not found in downloaded metadata, ignoring both' % (f0, f1)
                continue
    return data
def get_url(url, filename, max_retries=3, appkey=None):
    """
    General get URL recursive function

    :param url: URL to be downloaded
    :param filename: local file name
    :param max_retries: number of times to retry a failed request
    :param appkey: optional, if download key is necessary
    :return s: HTTP status code of the (last) request

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2020-05-28
    """
    chunk_size = 1024*1024  # stream the body in 1 MB chunks
    s = 0                   # bytes downloaded so far
    print('downloading %s as %s' % (url, filename))
    if appkey:
        r = requests.get(url, stream=True, headers={"Authorization": "Bearer %s" % appkey})
    else:
        r = requests.get(url, stream=True)
    if r.status_code == 200:
        content_size = int(r.headers['Content-Length'])
        print('downloading %s as %s size %sB' % (url, filename, content_size))
        with open(filename, 'wb') as f:
            for chunk in r.iter_content(chunk_size):
                f.write(chunk)
                s = s + len(chunk)
                print('downloaded %sB of %sB' % (s, content_size))
    elif max_retries > 0:
        # BUG FIX: the retry used to call get_url(url, max_retries-1, appkey),
        # passing the counter as `filename` and appkey as `max_retries`, and it
        # discarded the retry's status code. Retry correctly and propagate it.
        return get_url(url, filename, max_retries-1, appkey=appkey)
    return r.status_code
def download(granules, appkey=None):
    """
    Download files as listed in the granules metadata

    :param granules: list of granule metadata dictionaries with download links
    :param appkey: optional bearer token passed through to get_url
    :return file_metadata: dictionary with file names as key and granules metadata as values

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    file_metadata = {}
    for gn,granule in enumerate(granules):
        #print json.dumps(granule,indent=4, separators=(',', ': '))
        url = granule['links'][0]['href']
        filename=os.path.basename(urlparse.urlsplit(url).path)
        file_metadata[filename]=granule
        # download - a minimal code without various error checking and corrective actions
        # see wrfxpy/src/ingest/downloader.py
        if os.path.isfile(filename):
            print('file %s already downloaded' % filename)
            continue
        try:
            # BUG FIX: appkey used to be passed positionally into the
            # max_retries parameter of get_url; pass it by keyword.
            s = get_url(url, filename, appkey=appkey)
            if s != 200:
                if gn == 0:
                    # First granule failing usually means missing credentials
                    print('cannot connect to %s' % url)
                    # BUG FIX: this branch referenced an undefined name `r`;
                    # report the status code returned by get_url instead.
                    print('web request status code %s' % s)
                    print('Make sure you have file ~/.netrc permission 600 with the content')
                    print('machine urs.earthdata.nasa.gov\nlogin yourusername\npassword yourpassword')
                    sys.exit(1)
                else:
                    print('something happened when trying to download %s with status code %s' % (url, s))
        except Exception as e:
            print('download failed with error %s' % e)
    return file_metadata
def download_GOES16(time):
    """
    Download the GOES16 data through rclone application

    :param time: tupple with the start and end times
    :return bucket: dictionary of all the data downloaded and all the GOES16 data downloaded in the same directory level

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-12
    """
    bucket=Dict({})
    dts=[datetime.datetime.strptime(d,'%Y-%m-%dT%H:%M:%SZ') for d in time]
    delta=dts[1]-dts[0]
    # One download per whole hour inside the interval
    nh=int(delta.total_seconds()/3600)
    dates=[dts[0]+datetime.timedelta(seconds=3600*k) for k in range(1,nh+1)]
    for date in dates:
        tt=date.timetuple()
        # AWS bucket layout: <year>/<julian-day>/<hour>
        argT='%d/%03d/%02d' % (tt.tm_year,tt.tm_yday,tt.tm_hour)
        try:
            # Copy the hour folder locally (requires an rclone remote named 'goes16aws')
            args=['rclone','copyto','goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT,'.','-L']
            print 'running: '+' '.join(args)
            res=call(args)
            print 'goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT+' downloaded.'
            # List the remote folder and record the downloaded file names per hour key
            args=['rclone','ls','goes16aws:noaa-goes16/ABI-L2-MCMIPC/'+argT,'-L']
            out=check_output(args)
            # 'rclone ls' prints "<size> <name>"; take the name column
            bucket.update({argT : [o.split(' ')[2] for o in out.split('\n')[:-1]]})
        except Exception as e:
            print 'download failed with error %s' % e
    return bucket
def retrieve_af_data(bbox,time,burned=False,high=False,appkey=None):
    """
    Retrieve the active-fire data in a bounding box and time interval

    :param bbox: bounding box as (lonmin,lonmax,latmin,latmax)
    :param time: time interval (init_time,final_time)
    :param burned: passed through to get_meta (also search VNP09 reflectance)
    :param high: passed through to get_meta (also search 375m VIIRS granules)
    :param appkey: optional download bearer token passed through to download
    :return data: dictionary with all the data read

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    # Define settings
    lonmin,lonmax,latmin,latmax = bbox
    bounds=bbox
    # Rebind bbox to the closed polygon form expected by the search API
    bbox = [(lonmin,latmax),(lonmin,latmin),(lonmax,latmin),(lonmax,latmax),(lonmin,latmax)]
    maxg = 1000
    print "bbox"
    print bbox
    print "time:"
    print time
    print "maxg:"
    print maxg
    # Get data
    granules=get_meta(bbox,time,maxg,burned=burned,high=high)
    #print 'medatada found:\n' + json.dumps(granules,indent=4, separators=(',', ': '))
    # Eliminating the NRT data (repeated always)
    nrt_elimination(granules)
    # Download every granule of every product and collect its metadata
    file_metadata = {}
    for k,g in granules.items():
        print 'Downloading %s files' % k
        sys.stdout.flush()
        file_metadata.update(download(g,appkey=appkey))
        #print "download g:"
        #print g
    print "download complete"
    # Group all files downloaded
    files=group_all(".")
    #print "group all files:"
    #print files
    # Generate data dictionary
    data=Dict({})
    data.update(read_data(files.MOD,file_metadata,bounds))
    data.update(read_data(files.MYD,file_metadata,bounds))
    data.update(read_data(files.VNP,file_metadata,bounds))
    #data.update(read_data('VIIRS375','',bounds))
    return data
def files2metadata(files):
    """
    Get necessary metadata information from granules

    :param files: dictionary with MOD, MYD, and VNP keys and arrays of two files (geo and fire)
    :return file_metadata: dictionary mapping each product key to a dictionary
        of file names with their time_start/time_end in ISO format

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2020-05-07
    """
    def _meta_value(meta, tag):
        # Pull the quoted VALUE string for a tag out of a 'CoreMetadata.0' blob.
        return meta.split(tag)[1].split('VALUE')[1].split('=')[1].split('"')[1]

    def _meta_range(meta):
        # Return (time_start, time_end) in ISO format from a MODIS HDF4
        # 'CoreMetadata.0' attribute (dates 'YYYY-MM-DD', times 'HH:MM:SS...').
        def iso(dtag, ttag):
            date = _meta_value(meta, dtag)
            time = _meta_value(meta, ttag)
            return '%04d-%02d-%02dT%02d:%02d:%02dZ' % tuple(map(int,[date[:4],date[5:7],date[8:10],time[:2],time[3:5],time[6:8]]))
        return (iso('RANGEBEGINNINGDATE','RANGEBEGINNINGTIME'),
                iso('RANGEENDINGDATE','RANGEENDINGTIME'))

    def _h5_range(h5g):
        # Return (time_start, time_end) in ISO format from the VIIRS HDF5
        # 'HDFEOS' attributes (dates 'YYYYMMDD', times 'HHMMSS...').
        def iso(dkey, tkey):
            date = h5g['HDFEOS'].attrs[dkey]
            time = h5g['HDFEOS'].attrs[tkey]
            return '%04d-%02d-%02dT%02d:%02d:%02dZ' % tuple(map(int,[date[:4],date[4:6],date[6:8],time[:2],time[2:4],time[4:6]]))
        return iso('BeginningDate','BeginningTime'), iso('EndingDate','EndingTime')

    print("processing files2metadata")
    file_metadata=Dict([])
    for key in files.keys():
        print("key " + key)
        file_metadata[key]=Dict([])
        if key in ['MOD','MYD']:
            for f in files[key]:
                print("files=%s" % f)
                f0 = os.path.basename(f.geo)
                f1 = os.path.basename(f.fire)
                hdfg = SD(f.geo,SDC.READ)
                hdff = SD(f.fire,SDC.READ)
                # The geolocation and the fire granule share the same parsing
                for fname, hdf in ((f0, hdfg), (f1, hdff)):
                    start, end = _meta_range(hdf.attributes()['CoreMetadata.0'])
                    file_metadata[key][fname] = Dict([])
                    file_metadata[key][fname]["time_start"] = start
                    file_metadata[key][fname]["time_end"] = end
                hdfg.end()
                hdff.end()
        if key == 'VNP':
            for f in files[key]:
                print("files=%s" % f)
                f0 = os.path.basename(f.geo)
                f1 = os.path.basename(f.fire)
                file_metadata[key][f0] = Dict([])
                file_metadata[key][f1] = Dict([])
                h5g = h5py.File(f.geo,'r')
                ncf = nc.Dataset(f.fire,'r')
                start, end = _h5_range(h5g)
                file_metadata[key][f0]["time_start"] = start
                file_metadata[key][f0]["time_end"] = end
                # The netCDF fire granule stores 'YYYY-MM-DD HH:MM:SS...' strings
                file_metadata[key][f1]["time_start"] = ncf.getncattr('StartTime')[:19].replace(' ','T')+'Z'
                file_metadata[key][f1]["time_end"] = ncf.getncattr('EndTime')[:19].replace(' ','T')+'Z'
                h5g.close()
                ncf.close()
    return file_metadata
def process_data(path,bounds):
    """
    Process the data from a path in a bounding box coordinates

    :param path: path where all the granules are downloaded
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return data: dictonary with all the data

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2020-05-07
    """
    files = group_all(path)
    # Extract acquisition times directly from the granule files (no API metadata)
    file_metadata = files2metadata(files)
    data=Dict({})
    # Read each product family and merge everything into a single dictionary
    data.update(read_data(files.MOD,file_metadata.MOD,bounds))
    data.update(read_data(files.MYD,file_metadata.MYD,bounds))
    data.update(read_data(files.VNP,file_metadata.VNP,bounds))
    return data
def nrt_elimination(granules):
    """
    Cleaning all the NRT data which is repeated

    :param granules: dictionary of granule products to clean up
    :return: None; the granules dictionary is updated in place

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) and Jan Mandel (jan.mandel@ucdenver.edu) 2018-11-30
    """
    # keep only the archived LPDAAC_ECS granules; near-real-time copies
    # from other data centers duplicate them
    for product in ('MOD14', 'MYD14'):
        if product in granules:
            granules[product] = [g for g in granules[product]
                                 if g['data_center'] == 'LPDAAC_ECS']
880 def read_fire_mesh(filename):
882 Read necessary variables in the fire mesh of the wrfout file filename
884 :param filename: wrfout file
885 :return fxlon: lon coordinates in the fire mesh
886 :return fxlat: lat coordinates in the fire mesh
887 :return bbox: bounding box
888 :return time_esmf: simulation times in ESMF format
890 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
891 Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
893 print 'opening ' + filename
894 d = nc.Dataset(filename)
895 m,n = d.variables['XLONG'][0,:,:].shape
896 fm,fn = d.variables['FXLONG'][0,:,:].shape
897 fm=fm-fm/(m+1) # dimensions corrected for extra strip
898 fn=fn-fn/(n+1)
899 fxlon = np.array(d.variables['FXLONG'][0,:fm,:fn]) # masking extra strip
900 fxlat = np.array(d.variables['FXLAT'][0,:fm,:fn])
901 time_esmf = ''.join(d.variables['Times'][:][0]) # date string as YYYY-MM-DD_hh:mm:ss
902 bbox = [fxlon.min(),fxlon.max(),fxlat.min(),fxlat.max()]
903 print 'min max longitude latitude %s' % bbox
904 print 'time (ESMF) %s' % time_esmf
906 plot = False
907 if plot:
908 tign_g = np.array(d.variables['TIGN_G'][0,:fm,:fn])
909 plot_3D(fxlon,fxlat,tign_g)
911 d.close()
913 return fxlon,fxlat,bbox,time_esmf
def data2json(data,keys,dkeys,N):
    """
    Create a json dictionary from data dictionary

    :param data: dictionary with Latitude, Longitude and fire mask arrays and metadata information
    :param keys: keys which are going to be included into the json
    :param dkeys: keys in the data dictionary which correspond to the previous keys (same order)
    :param N: number of entries in each instance of the json dictionary (used for the variables with only one entry in the data dictionary)
    :return ret: dictionary with all the fire detection information to create the KML file

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    ret = Dict({})
    granule_names = list(data)
    first = granule_names[0]
    for i, key in enumerate(keys):
        dk = dkeys[i]
        if isinstance(data[first][dk], (list, tuple, np.ndarray)):
            # array-valued entry: flatten each granule's array and chain them
            chunks = [np.ravel(data[g][dk]) for g in granule_names]
        else:
            # scalar entry: replicate it once per detection of its granule
            chunks = [[data[g][dk]] * N[j] for j, g in enumerate(granule_names)]
        ret.update({key: np.concatenate(chunks)})
    return ret
def sdata2json(sdata,keys,dkeys,N):
    """
    Create a json dictionary from sorted array of data dictionaries

    :param sdata: sorted array of (granule_name, data_dict) pairs with Latitude, Longitude and fire mask arrays and metadata information
    :param keys: keys which are going to be included into the json
    :param dkeys: keys in the data dictionary which correspond to the previous keys (same order)
    :param N: number of entries in each instance of the json dictionary (used for the variables with only one entry in the data dictionary)
    :return ret: dictionary with all the fire detection information to create the KML file

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-12-03
    """
    ret=Dict({'granules': [d[0] for d in sdata]})
    for i,k in enumerate(keys):
        # one entry per granule, None when the granule lacks this key
        dd = [d[1][dkeys[i]] if dkeys[i] in d[1] else None for d in sdata]
        if dd:
            if np.any([isinstance(d,(list, tuple, np.ndarray)) for d in dd]):
                # array-valued key: granules lacking it contribute an empty array
                out = [d if d is not None else np.array([]) for d in dd]
                # bug fix: store the None-free list 'out'; previously the raw
                # list 'dd' (still containing None entries) was stored and the
                # computed 'out' was discarded
                ret.update({k : out})
            else:
                # scalar key: replicate the value per detection count N
                out = [[d[1][1][dkeys[i]]]*N[d[0]] if dkeys[i] in d[1][1] else [] for d in enumerate(sdata)]
                ret.update({k : out})
    return ret
def write_csv(d,bounds):
    """
    Write fire detections from data dictionary d to a CSV file

    :param d: dictionary with Latitude, Longitude and fire mask arrays and metadata information
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :return: fire_detections.csv file with all the detections

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com), 2018-09-17
    """
    import pandas as pd
    frame = pd.DataFrame(data=d)
    # keep only the detections strictly inside the bounding box
    lon_ok = (frame['longitude'] > bounds[0]) & (frame['longitude'] < bounds[1])
    lat_ok = (frame['latitude'] > bounds[2]) & (frame['latitude'] < bounds[3])
    frame[lon_ok & lat_ok].to_csv('fire_detections.csv', encoding='utf-8', index=False)
def plot_3D(xx,yy,zz):
    """
    Plot surface of (xx,yy,zz) data

    :param xx: x arrays
    :param yy: y arrays
    :param zz: values at the (x,y) points
    :return: a plot show of the 3D data

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-09-21
    """
    # imported lazily so the module loads without a display backend
    from mpl_toolkits.mplot3d import Axes3D
    import matplotlib.pyplot as plt
    from matplotlib import cm
    figure = plt.figure()
    axes = figure.gca(projection='3d')
    axes.plot_surface(xx, yy, zz, cmap=cm.coolwarm)
    plt.show()
def time_iso2num(time_iso):
    """
    Transform an iso time string to a time integer number of seconds since December 31 1969 at 17:00:00

    :param time_iso: string iso date
    :return s: integer number of seconds since December 31 1969 at 17:00:00

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    parsed = datetime.datetime.strptime(time_iso[0:19], '%Y-%m-%dT%H:%M:%S')
    # seconds since December 31 1969 at 17:00:00
    # (mktime interprets the parsed time in the local timezone)
    return time.mktime(parsed.timetuple())
def time_iso2datetime(time_iso):
    """
    Transform an iso time string to a datetime element

    :param time_iso: string iso date
    :return time_datetime: datetime element

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    # only the first 19 characters are parsed (the trailing 'Z' is dropped)
    return datetime.datetime.strptime(time_iso[0:19], '%Y-%m-%dT%H:%M:%S')
def time_datetime2iso(time_datetime):
    """
    Transform a datetime element to iso time string

    :param time_datetime: datetime element
    :return time_iso: string iso date

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-01
    """
    fields = (time_datetime.year, time_datetime.month, time_datetime.day,
              time_datetime.hour, time_datetime.minute, time_datetime.second)
    return '%02d-%02d-%02dT%02d:%02d:%02dZ' % fields
1045 def time_num2iso(time_num):
1047 Transform a time integer number of seconds since December 31 1969 at 17:00:00 to an iso time string
1049 :param time_num: integer number of seconds since December 31 1969 at 17:00:00
1050 :return date: time string in ISO date
1052 Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
1053 Angel Farguell (angel.farguell@gmail.com) 2018-10-01
1055 dt=datetime.datetime.fromtimestamp(time_num)
1056 # seconds since December 31 1969 at 17:00:00
1057 date='%02d-%02d-%02dT%02d:%02d:%02dZ' % (dt.year,dt.month,dt.day,dt.hour,dt.minute,dt.second)
1058 return date
def pixel_dim(sample,N,h,p,a=None):
    """
    Computes pixel dimensions (along-scan and track pixel sizes)

    :param sample: array of integers with the column number (sample variable in files)
    :param N: scalar, total number of pixels in each row of the image swath
    :param h: scalar, altitude of the satellite in km
    :param p: scalar, pixel nadir resolution in km (or an array of resolutions,
              one per swath zone, together with the angles in a)
    :param a: array of floats of the size of p with the angles where the resolution change
    :return theta: scan angle in radiands
    :return scan: along-scan pixel size in km
    :return track: along-track pixel size in km

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2018-10-01
    """
    Re=6378 # approximation of the radius of the Earth in km
    r=Re+h # distance from the Earth center to the satellite in km
    M=(N-1)*0.5 # index of the swath center (nadir sample)
    s=np.arctan(p/h) # trigonometry (deg/sample)
    if isinstance(p,(list, tuple, np.ndarray)):
        # multi-resolution swath: Ns[k] is the number of samples in each
        # resolution zone, derived from the zone-boundary angles in a;
        # the last zone fills the remaining samples up to nadir
        Ns=np.array([int((a[k]-a[k-1])/s[k-1]) for k in range(1,len(a)-1)])
        Ns=np.append(Ns,int(M-Ns.sum()))
        # initialize all samples with the innermost zone resolution s[0]
        theta=s[0]*(sample-M)
        scan=Re*s[0]*(np.cos(theta)/np.sqrt((Re/r)**2-np.square(np.sin(theta)))-1)
        track=r*s[0]*(np.cos(theta)-np.sqrt((Re/r)**2-np.square(np.sin(theta))))
        # overwrite the outer zones on both sides of the swath center;
        # NOTE: p is reused from here on as a boolean mask over sample
        for k in range(1,len(Ns)):
            # samples on the low side of zone k
            p=sample<=M-Ns[0:k].sum()
            theta[p]=s[k]*(sample[p]-(M-Ns[0:k].sum()))-(s[0:k]*Ns[0:k]).sum()
            scan[p]=Re*np.mean(s)*(np.cos(theta[p])/np.sqrt((Re/r)**2-np.square(np.sin(theta[p])))-1)
            track[p]=r*np.mean(s)*(np.cos(theta[p])-np.sqrt((Re/r)**2-np.square(np.sin(theta[p]))))
            # samples on the high side of zone k
            p=sample>=M+Ns[0:k].sum()
            theta[p]=s[k]*(sample[p]-(M+Ns[0:k].sum()))+(s[0:k]*Ns[0:k]).sum()
            scan[p]=Re*np.mean(s)*(np.cos(theta[p])/np.sqrt((Re/r)**2-np.square(np.sin(theta[p])))-1)
            track[p]=r*np.mean(s)*(np.cos(theta[p])-np.sqrt((Re/r)**2-np.square(np.sin(theta[p]))))
    else:
        # constant-resolution swath
        theta=s*(sample-M)
        scan=Re*s*(np.cos(theta)/np.sqrt((Re/r)**2-np.square(np.sin(theta)))-1)
        track=r*s*(np.cos(theta)-np.sqrt((Re/r)**2-np.square(np.sin(theta))))
    return (theta,scan,track)
def copyto(partial_path,kml):
    """
    Copy information in partial_path to kml

    :param partial_path: path to a partial KML file
    :param kml: KML object where to write to
    :return: information from partial_path into kml

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    with open(partial_path, 'r') as source:
        for chunk in source:
            kml.write(chunk)
def json2kml(d,kml_path,bounds,prods,opt='granule',minconf=80.):
    """
    Creates a KML file kml_path from a dictionary d

    :param d: dictionary with all the fire detection information to create the KML file
    :param kml_path: path in where the KML file is going to be written
    :param bounds: spatial bounds tuple (lonmin,lonmax,latmin,latmax)
    :param prods: dictionary mapping product codes (here 'AF','TF','FRP','NF','AFN') to folder names
    :param opt: grouping of the placemark folders: 'dates' regroups by acquisition
                date, 'granule' (default) keeps one folder per granule, anything
                else writes a single 'Pixels' folder
    :param minconf: minimum confidence for a detection to be written (default 80.)
    :return: a KML file

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Jan Mandel (jan.mandel@ucdenver.edu) 2018-09-17
    """
    if 'latitude' in d:
        # flat arrays (one entry per detection) are regrouped into lists of
        # lists, so that each sub-list becomes one KML folder below
        if not isinstance(d['latitude'][0],(list, tuple, np.ndarray)):
            if opt=='dates':
                # indices that sort every array by acquisition date
                ind=[i[0] for i in sorted(enumerate(d.acq_date), key=lambda x:x[1])]
                # run lengths of each group of equal consecutive dates
                L=[len(list(grp)) for k, grp in groupby(d['acq_date'][ind])]
                L.insert(0,0)
                # cumulative offsets delimiting each date group
                ll=[sum(L[0:k+1]) for k in range(len(L))]
                for v in list(d):
                    sort=[d[v][i] for i in ind]
                    d[v]=[sort[ll[k]:ll[k+1]] for k in range(len(ll)-1)]
            else:
                # a single group holding all the detections
                for v in list(d):
                    d[v]=[d[v]]
                opt='pixels'

        # FRP style names binned every 10, plus no-data (-1) and >400 (40) bins
        frp_style={-1:'modis_frp_no_data',40:'modis_frp_gt_400'}
        for i in range(0,40):
            frp_style[i]='modis_frp_%s_to_%s' % (i*10, i*10+10)

        with open(kml_path,'w') as kml:

            # KML header and shared style definitions
            copyto('kmls/partial1.kml',kml)

            # write bounding box
            kml.write('<Folder>\n'
                +'<name>Bounding Box</name>\n'
                +'<Placemark>\n'
                +'<name>Bounds</name>\n'
                +'<description>Bounding box of the fire detections</description>\n'
                +'<Style>\n'
                +'<LabelStyle><color>00000000</color><scale>0.000000</scale></LabelStyle>\n'
                +'<LineStyle><color>ff0000e6</color><width>4.000000</width></LineStyle>\n'
                +'<PolyStyle><color>00f0f0f0</color><outline>1</outline></PolyStyle>\n'
                +'</Style>\n'
                +'<MultiGeometry>\n'
                +'<Polygon>\n'
                +'<extrude>0</extrude><altitudeMode>clampToGround</altitudeMode>\n'
                +'<outerBoundaryIs><LinearRing><coordinates>\n'
                +'%s,%s,0\n' % (bounds[0],bounds[2])
                +'%s,%s,0\n' % (bounds[1],bounds[2])
                +'%s,%s,0\n' % (bounds[1],bounds[3])
                +'%s,%s,0\n' % (bounds[0],bounds[3])
                +'%s,%s,0\n' % (bounds[0],bounds[2])
                +'</coordinates></LinearRing></outerBoundaryIs>\n'
                +'</Polygon>\n'
                +'</MultiGeometry>\n'
                +'</Placemark>\n'
                +'</Folder>\n')

            # set some constants
            r = 6378 # Earth radius
            km_lat = 180/(np.pi*r) # 1km in degrees latitude
            ND = len(d['latitude']) # number of detection groups

            for prod in prods:

                kml.write('<Folder>\n')
                kml.write('<name>%s</name>\n' % prods[prod])

                if prod == 'FRP':
                    # additional styles used by the FRP product
                    copyto('kmls/partial2.kml',kml)

                if prod == 'TF':
                    # colormap giving each time group of the TF product its own color
                    col = np.flip(np.divide([(230, 25, 75, 150), (245, 130, 48, 150), (255, 255, 25, 150),
                        (210, 245, 60, 150), (60, 180, 75, 150), (70, 240, 240, 150),
                        (0, 0, 128, 150), (145, 30, 180, 150), (240, 50, 230, 150),
                        (128, 128, 128, 150)],255.),0)
                    cm = colors.LinearSegmentedColormap.from_list('BuRd',col,ND)
                    cols = ['%02x%02x%02x%02x' % tuple(255*np.flip(c)) for c in cm(range(cm.N))]
                    # TF groups are written newest first
                    t_range = range(ND-1,-1,-1)
                else:
                    t_range = range(ND)

                for t in t_range:
                    lats=np.array(d['latitude'][t]).astype(float)
                    lons=np.array(d['longitude'][t]).astype(float)
                    # mask of the detections inside the bounding box
                    ll=np.logical_and(np.logical_and(np.logical_and(lons >= bounds[0], lons <= bounds[1]), lats >= bounds[2]), lats <= bounds[3])
                    latitude=lats[ll]
                    longitude=lons[ll]
                    NN=len(latitude)
                    if NN > 0:
                        acq_date=np.array(d['acq_date'][t])[ll]
                        acq_time=np.array(d['acq_time'][t])[ll]
                        # optional attributes: fall back to defaults when missing
                        try:
                            satellite=np.array(d['satellite'][t])[ll]
                        except:
                            satellite=np.array(['Not available']*NN)
                        try:
                            instrument=np.array(d['instrument'][t])[ll]
                        except:
                            instrument=np.array(['Not available']*NN)
                        try:
                            confidence=np.array(d['confidence'][t])[ll].astype(float)
                        except:
                            confidence=np.array(np.zeros(NN)).astype(float)
                        try:
                            frps=np.array(d['frp'][t])[ll].astype(float)
                        except:
                            frps=np.array(np.zeros(NN)).astype(float)
                        try:
                            angles=np.array(d['scan_angle'][t])[ll].astype(float)
                        except:
                            angles=np.array(['Not available']*NN)
                        try:
                            scans=np.array(d['scan'][t])[ll].astype(float)
                        except:
                            scans=np.ones(NN).astype(float)
                        try:
                            tracks=np.array(d['track'][t])[ll].astype(float)
                        except:
                            tracks=np.ones(NN).astype(float)

                        kml.write('<Folder>\n')
                        if opt=='date':
                            kml.write('<name>%s</name>\n' % acq_date[0])
                        elif opt=='granule':
                            kml.write('<name>%s</name>\n' % d['granules'][t])
                        else:
                            kml.write('<name>Pixels</name>\n')

                        for p in range(NN):
                            lat=latitude[p]
                            lon=longitude[p]
                            conf=confidence[p]
                            # only detections at or above the confidence threshold are written
                            if conf >= minconf:
                                frp=frps[p]
                                angle=angles[p]
                                scan=scans[p]
                                track=tracks[p]
                                timestamp=acq_date[p] + 'T' + acq_time[p][0:2] + ':' + acq_time[p][2:4] + ':00Z'
                                timedescr=acq_date[p] + ' ' + acq_time[p][0:2] + ':' + acq_time[p][2:4] + ':00 UTC'

                                if prod == 'NF':
                                    kml.write('<Placemark>\n<name>Ground detection square</name>\n')
                                    kml.write('<description>\nlongitude: %s\n' % lon
                                        + 'latitude: %s\n' % lat
                                        + 'time: %s\n' % timedescr
                                        + 'satellite: %s\n' % satellite[p]
                                        + 'instrument: %s\n' % instrument[p]
                                        + 'scan angle: %s\n' % angle
                                        + 'along-scan: %s\n' % scan
                                        + 'along-track: %s\n' % track
                                        + '</description>\n')
                                else:
                                    kml.write('<Placemark>\n<name>Fire detection square</name>\n')
                                    kml.write('<description>\nlongitude: %s\n' % lon
                                        + 'latitude: %s\n' % lat
                                        + 'time: %s\n' % timedescr
                                        + 'satellite: %s\n' % satellite[p]
                                        + 'instrument: %s\n' % instrument[p]
                                        + 'confidence: %s\n' % conf
                                        + 'FRP: %s\n' % frp
                                        + 'scan angle: %s\n' % angle
                                        + 'along-scan: %s\n' % scan
                                        + 'along-track: %s\n' % track
                                        + '</description>\n')
                                kml.write('<TimeStamp><when>%s</when></TimeStamp>\n' % timestamp)
                                kml.write('<Data name="FRP"><value>%s</value></Data>\n' % frp)

                                # product-specific placemark style
                                if prod == 'AF':
                                    if conf < 30:
                                        kml.write('<styleUrl> modis_conf_low </styleUrl>\n')
                                    elif conf < 80:
                                        kml.write('<styleUrl> modis_conf_med </styleUrl>\n')
                                    else:
                                        kml.write('<styleUrl> modis_conf_high </styleUrl>\n')
                                elif prod == 'TF':
                                    kml.write('<Style>\n'+'<PolyStyle>\n'
                                        +'<color>%s</color>\n' % cols[t]
                                        +'<outline>0</outline>\n'+'</PolyStyle>\n'
                                        +'</Style>\n')
                                elif prod == 'FRP':
                                    # FRP bin index, capped at the >400 bin
                                    frpx = min(40,np.ceil(frp/10.)-1)
                                    kml.write('<styleUrl> %s </styleUrl>\n' % frp_style[frpx] )
                                elif prod == 'NF':
                                    kml.write('<styleUrl> no_fire </styleUrl>\n')
                                elif prod == 'AFN':
                                    if conf < 80:
                                        kml.write('<Style>\n'+'<PolyStyle>\n'
                                            +'<color>7000ffff</color>\n'
                                            +'<outline>0</outline>\n'+'</PolyStyle>\n'
                                            +'</Style>\n')
                                    elif conf < 90:
                                        kml.write('<Style>\n'+'<PolyStyle>\n'
                                            +'<color>7000a5ff</color>\n'
                                            +'<outline>0</outline>\n'+'</PolyStyle>\n'
                                            +'</Style>\n')
                                    else:
                                        kml.write('<Style>\n'+'<PolyStyle>\n'
                                            +'<color>700000ff</color>\n'
                                            +'<outline>0</outline>\n'+'</PolyStyle>\n'
                                            +'</Style>\n')

                                kml.write('<Polygon>\n<outerBoundaryIs>\n<LinearRing>\n<coordinates>\n')

                                km_lon=km_lat/np.cos(lat*np.pi/180) # 1 km in longitude

                                # half sizes of the detection square in degrees
                                sq_track_size_km=track
                                sq2_lat=km_lat * sq_track_size_km/2
                                sq_scan_size_km=scan
                                sq2_lon=km_lon * sq_scan_size_km/2

                                kml.write('%s,%s,0\n' % (lon - sq2_lon, lat - sq2_lat))
                                kml.write('%s,%s,0\n' % (lon - sq2_lon, lat + sq2_lat))
                                kml.write('%s,%s,0\n' % (lon + sq2_lon, lat + sq2_lat))
                                kml.write('%s,%s,0\n' % (lon + sq2_lon, lat - sq2_lat))
                                kml.write('%s,%s,0\n' % (lon - sq2_lon, lat - sq2_lat))

                                kml.write('</coordinates>\n</LinearRing>\n</outerBoundaryIs>\n</Polygon>\n</Placemark>\n')

                        kml.write('</Folder>\n')

                kml.write('</Folder>\n')

            kml.write('</Document>\n</kml>\n')

        print 'Created file %s' % kml_path
    else:
        print 'Any detections to be saved as %s' % kml_path
def burned_algorithm(data):
    """
    Computes mask of burned scar pixels

    :param data: data dictionary with all the necessary bands M7, M8, M10 and M11
    :return C: mask of burned scar pixels

    Developed in Python 2.7.15 :: Anaconda 4.5.10, on MACINTOSH.
    Angel Farguell (angel.farguell@gmail.com) 2019-01-03
    """
    # Thresholds
    RthSub = 0.05
    Rth = 0.81
    M07UB = 0.19
    M08LB, M08UB = 0.00, 0.28
    M10LB, M10UB = 0.07, 1.00
    M11LB = 0.05
    eps = 1e-30   # avoids division by zero in the band ratio
    # Bands
    m7 = data.M7
    m8 = data.M8
    m10 = data.M10
    m11 = data.M11
    # Eq 1a: shifted ratio between bands M8 and M11
    ratio = (m8.astype(float) - RthSub) / (m11.astype(float) + eps)
    conditions = [
        np.logical_and(ratio > 0, ratio < Rth),     # Eq 1a
        np.logical_and(m8 > M08LB, m8 < M08UB),     # Eq 1b
        m7 < M07UB,                                 # Eq 1c
        m11 > M11LB,                                # Eq 1d
        np.logical_and(m10 > M10LB, m10 < M10UB),   # Eq 1e
    ]
    # all the conditions must hold at the same time
    mask = conditions[0]
    for cond in conditions[1:]:
        mask = np.logical_and(mask, cond)
    return mask
if __name__ == "__main__":
    # Demo: retrieve one day of active-fire data over western Canada
    region = [-132.86966, -102.0868788, 44.002495, 66.281204]
    interval = ("2012-09-11T00:00:00Z", "2012-09-12T00:00:00Z")
    data = retrieve_af_data(region, interval)
    # Save the data dictionary into a matlab structure file out.mat
    sio.savemat('out.mat', mdict=data)