#!/usr/bin/env python3
__author__ = "Angel Ling"
import obspy
import string
import pickle
import numpy as np
from os import listdir
from obspy.geodetics.base import gps2dist_azimuth # distance in m, azi and bazi in degree
from obspy.geodetics import kilometers2degrees # distance in degree
from obspy.geodetics.flinnengdahl import FlinnEngdahl
from obspy.signal.rotate import rotate_ne_rt
from obspy.taup import TauPyModel
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
from matplotlib.offsetbox import OffsetImage, AnnotationBbox
from matplotlib.cbook import simple_linear_interpolation
from matplotlib.gridspec import GridSpec
from operator import itemgetter
from obspy.signal.array_analysis import array_processing
from matplotlib.colorbar import ColorbarBase
from matplotlib.ticker import MultipleLocator
# ==================================
# %% General variables and functions
KM_PER_DEG = 111.1949
cmap_fk = plt.cm.get_cmap('hot_r') # color map for GMV
# Set up AA domain
AA_lat1 = 40. # starting latitude 40N
AA_lat2 = 52. # ending latitude 52N
AA_lon1 = 0. # starting longitude 0E
AA_lon2 = 22. # ending longitude 22E
# Create grids on map
box = 4 # number of boxes on each side
dlat = (AA_lat2-AA_lat1)/box
dlon = (AA_lon2-AA_lon1)/box
def maxabs(x):
    return max(abs(x))
# Function to calculate the antipode
def antipode(lat, lon):
    lat *= -1
    if lon > 0:
        lon -= 180
    elif lon <= 0:
        lon += 180
    return lat, lon
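# A quick sanity check (values chosen purely for illustration):
#   antipode(48.0, 11.0)  # -> (-48.0, -169.0)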
# Degrees to cardinal directions
def degrees_to_cardinal(d):
    dirs = ["N", "NNE", "NE", "ENE", "E", "ESE", "SE", "SSE",
            "S", "SSW", "SW", "WSW", "W", "WNW", "NW", "NNW"]
    ix = round(d / (360. / len(dirs)))
    return dirs[ix % len(dirs)]
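# For example, degrees_to_cardinal(95.) returns "E", and
# degrees_to_cardinal(350.) wraps around to "N".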
# ==================================
# %% Event catalog
def readevent(event_name, data_directory, local=False):
    """
    Read the QUAKEML catalog or event pkl downloaded via obspyDMT
    :param event_name: event name/ID
    :param data_directory: event data directory
    :param local: read a local event whose parameters are set by hand below
    :return: event dictionary
    """
    print("Reading event catalog of "+event_name+"...")
    event_dic = {}
    if not local:
        try:  # QUAKEML
            cat = obspy.read_events(data_directory+"../EVENTS-INFO/catalog.ml", format="QUAKEML")
            ev = cat[0]
            origin = ev.preferred_origin()
            lat_event = origin.latitude
            lon_event = origin.longitude
            dep_event = origin.depth/1000.
            ev_otime = origin.time
            time_event_sec = ev_otime.timestamp
            year = ev_otime.year
            month = ev_otime.month
            day = ev_otime.day
            hour = ev_otime.hour
            minute = ev_otime.minute
            second = float(ev_otime.second) + ev_otime.microsecond / 1E6
            if ev.preferred_magnitude().magnitude_type.lower() in ["mw", "mww"]:  # case-insensitive check
                mag_type = ev.preferred_magnitude().magnitude_type
                mag_event = ev.preferred_magnitude().mag
            else:
                # Prefer a moment magnitude if one is listed;
                # otherwise keep the last magnitude in the catalog.
                for _i in ev.magnitudes:
                    mag_type = _i.magnitude_type
                    mag_event = _i.mag
                    if _i.magnitude_type.lower() in ["mw", "mww"]:
                        break
        except FileNotFoundError:  # Event pkl
            ev_pkl = pickle.load(open(data_directory+"info/event.pkl", "rb"))
            lat_event = ev_pkl.get('latitude')
            lon_event = ev_pkl.get('longitude')
            dep_event = ev_pkl.get('depth')
            ev_otime = ev_pkl.get("datetime")
            time_event_sec = ev_otime.timestamp
            year = ev_otime.year
            month = ev_otime.month
            day = ev_otime.day
            hour = ev_otime.hour
            minute = ev_otime.minute
            second = float(ev_otime.second) + ev_otime.microsecond / 1E6
            mag_type = ev_pkl.get("magnitude_type")
            mag_event = ev_pkl.get("magnitude")
    else:
        # Enter local event info by hand
        lat_event = -20.546   # 46.91
        lon_event = -175.390  # 9.13
        dep_event = 0.0       # 0.3
        ev_otime = obspy.UTCDateTime("2022-01-15 04:14:45")  # obspy.UTCDateTime("2020-10-25 19:35:43")
        time_event_sec = ev_otime.timestamp
        year = ev_otime.year
        month = ev_otime.month
        day = ev_otime.day
        hour = ev_otime.hour
        minute = ev_otime.minute
        second = float(ev_otime.second) + ev_otime.microsecond / 1E6
        mag_type = "M"   # "ML"
        mag_event = 5.8  # 4.4
    fe = FlinnEngdahl()
    event_dic['event_name'] = event_name
    event_dic['region'] = fe.get_region(lon_event, lat_event)
    event_dic['lat'] = lat_event
    event_dic['lon'] = lon_event
    event_dic['depth'] = dep_event
    event_dic["ev_otime"] = ev_otime
    event_dic['time_sec'] = time_event_sec
    event_dic['year'] = year
    event_dic['month'] = month
    event_dic['day'] = day
    event_dic['hour'] = hour
    event_dic['minute'] = minute
    event_dic['second'] = second
    event_dic['mag_type'] = mag_type
    event_dic['mag'] = mag_event
    return event_dic
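# Usage sketch (the event name and obspyDMT directory below are hypothetical;
# adjust them to your own download):
#   event_dic = readevent("20220115_041445", "./20220115_041445/processed/")
#   print(event_dic["region"], event_dic["mag_type"], event_dic["mag"])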
# ==================================
# %% Read and process streams and inventories
def keep_trace(st, keep):
    newst = obspy.Stream()
    for i, tr in enumerate(st):
        if i in keep:
            newst += tr
    return newst
def filter_streams(st, f1, f2, f=None, ftype="bandpass"):
    """
    Filter streams
    :param st: stream
    :param f1: lower corner frequency (bandpass)
    :param f2: upper corner frequency (bandpass)
    :param f: corner frequency for lowpass/highpass
    :param ftype: filter type, default: "bandpass"
    :return: filtered stream
    """
    st_filtered = st.copy()
    st_filtered.detrend("demean")
    st_filtered.detrend('linear')
    st_filtered.taper(0.05, 'cosine')
    if ftype == "bandpass":
        st_filtered.filter(ftype, freqmin=f1, freqmax=f2, corners=4, zerophase=True)
    else:
        st_filtered.filter(ftype, freq=f, corners=4, zerophase=True)  # filter the copy, not the input stream
    st_filtered.detrend('linear')  # detrend after filtering
    # st_filtered.decimate(2)  # downsample the stream
    return st_filtered
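# Example (an assumed 25-100 s band; for a lowpass, f1/f2 are unused):
#   st_band = filter_streams(st, 1/100., 1/25.)
#   st_low  = filter_streams(st, None, None, f=0.1, ftype="lowpass")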
def read_data_inventory(prodata_directory, resp_directory, event_dic, tstart=0, tend=7200, read_obs=False, plot_3c=True):
    """
    Read raw data and station inventory
    :param prodata_directory: data directory
    :param resp_directory: inventory directory
    :param event_dic: event dictionary
    :param tstart: trace start time in seconds after the origin time
    :param tend: trace end time in seconds after the origin time
    :param read_obs: option to read OBS data
    :param plot_3c: option to plot in 3 components
    :return: dictionary with station data and inventory
    """
    print('Reading data and inventory...')
    st1 = obspy.Stream()  # Z channels
    st2 = obspy.Stream()  # N channels
    st3 = obspy.Stream()  # E channels
    lat_sta = np.array([])
    lon_sta = np.array([])
    elv_sta = np.array([])
    dist_sta = np.array([])
    azi_sta = np.array([])
    bazi_sta = np.array([])
    name_sta = np.array([])
    net_sta = np.array([])
    lat_event = event_dic["lat"]
    lon_event = event_dic["lon"]
    region = event_dic["region"]
    event_time = event_dic["time_sec"]
    stime = obspy.UTCDateTime(event_time + tstart)
    etime = obspy.UTCDateTime(event_time + tend)
    data_dict_list = []
    obs = 0        # OBS station counter
    Z12_count = 0  # Z12 station counter
    print('Processing data of the '+event_dic['mag_type'].capitalize()+"%.1f " % event_dic['mag']+region+' earthquake...')
    print('Reading traces from '+str(tstart)+' to '+str(tend)+' s after the origin time...')
    for file in sorted(listdir(prodata_directory)):
        if file.endswith('HHZ'):
            fsp = file.split(".")
            try:
                # HHZ
                tr11 = obspy.read(prodata_directory+file, starttime=stime, endtime=etime)  # read the trace
                inv_sta = obspy.read_inventory(resp_directory+'STXML.'+file)               # read the inventory
                # Discard stations missing more than 10% of the expected samples
                data_count = (tend-tstart)*tr11[0].stats.sampling_rate - ((tend-tstart)*tr11[0].stats.sampling_rate*0.1)
                if tr11[0].stats.npts < data_count:
                    continue
            except Exception as e:  # If the file is not present, skip this station
                # print(e)
                continue
            # Read the station location
            lat = inv_sta[0][0].latitude
            lon = inv_sta[0][0].longitude
            elv = inv_sta[0][0].elevation / 1000.  # in km
            dist, azi, bazi = gps2dist_azimuth(lat_event, lon_event, lat, lon)
            dist_deg = kilometers2degrees(dist/1000.)
            # Ignore stations outside the plotting region
            if lat < AA_lat1-0.1 or lat > AA_lat2+0.1:
                continue
            if lon < AA_lon1-0.1 or lon > AA_lon2+0.1:
                continue
            tr11[0].stats.distance = dist_deg
            tr11[0].stats.backazimuth = bazi
            tr11[0].stats["coordinates"] = {}
            tr11[0].stats["coordinates"]["latitude"] = lat
            tr11[0].stats["coordinates"]["longitude"] = lon
            tr11[0].stats["coordinates"]["elevation"] = elv
            try:  # Read HHN
                tr22 = obspy.read(prodata_directory+fsp[0]+"."+fsp[1]+"."+fsp[2]+".HHN", starttime=stime, endtime=etime)
                if tr22[0].stats.npts < data_count:
                    continue
                tr22[0].stats.distance = dist_deg
                tr22[0].stats.backazimuth = bazi
                tr22[0].stats["coordinates"] = {}
                tr22[0].stats["coordinates"]["latitude"] = lat
                tr22[0].stats["coordinates"]["longitude"] = lon
                tr22[0].stats["coordinates"]["elevation"] = elv
            except Exception as e:  # If HHN is not present, try HH1 or fall back to a zero trace
                # print(e)
                try:  # Read HH1
                    tr22 = obspy.read(prodata_directory+fsp[0]+"."+fsp[1]+"."+fsp[2]+".HH1", starttime=stime, endtime=etime)
                    if tr22[0].stats.npts < data_count:
                        continue
                except Exception:
                    if plot_3c:
                        continue
                    else:
                        # If there's no HHN channel, use a zero-filled dummy trace
                        tr22 = obspy.Trace(np.zeros(len(tr11[0].data)))
                        tr22.stats.network = tr11[0].stats.network
                        tr22.stats.station = tr11[0].stats.station
                        tr22.stats.location = tr11[0].stats.location
                        tr22.stats.delta = tr11[0].stats.delta
                        tr22.stats.starttime = tr11[0].stats.starttime
                        tr22.stats.channel = "HHN"
                        tr22.stats.distance = dist_deg
                        tr22.stats.backazimuth = bazi
                        tr22.stats["coordinates"] = {}
                        tr22.stats["coordinates"]["latitude"] = lat
                        tr22.stats["coordinates"]["longitude"] = lon
                        tr22.stats["coordinates"]["elevation"] = elv
                        tr22 = obspy.Stream(traces=[tr22])
            try:  # Read HHE
                tr33 = obspy.read(prodata_directory+fsp[0]+"."+fsp[1]+"."+fsp[2]+".HHE", starttime=stime, endtime=etime)
                if tr33[0].stats.npts < data_count:
                    continue
                tr33[0].stats.distance = dist_deg
                tr33[0].stats.backazimuth = bazi
                tr33[0].stats["coordinates"] = {}
                tr33[0].stats["coordinates"]["latitude"] = lat
                tr33[0].stats["coordinates"]["longitude"] = lon
                tr33[0].stats["coordinates"]["elevation"] = elv
            except Exception as e:  # If HHE is not present, try HH2 or fall back to a zero trace
                # print(e)
                try:  # Read HH2
                    tr33 = obspy.read(prodata_directory+fsp[0]+"."+fsp[1]+"."+fsp[2]+".HH2", starttime=stime, endtime=etime)
                    if tr33[0].stats.npts < data_count:
                        continue
                except Exception:
                    if plot_3c:
                        continue
                    else:
                        # If there's no HHE channel, use a zero-filled dummy trace
                        tr33 = obspy.Trace(np.zeros(len(tr11[0].data)))
                        tr33.stats.network = tr11[0].stats.network
                        tr33.stats.station = tr11[0].stats.station
                        tr33.stats.location = tr11[0].stats.location
                        tr33.stats.delta = tr11[0].stats.delta
                        tr33.stats.starttime = tr11[0].stats.starttime
                        tr33.stats.channel = "HHE"
                        tr33.stats.distance = dist_deg
                        tr33.stats.backazimuth = bazi
                        tr33.stats["coordinates"] = {}
                        tr33.stats["coordinates"]["latitude"] = lat
                        tr33.stats["coordinates"]["longitude"] = lon
                        tr33.stats["coordinates"]["elevation"] = elv
                        tr33 = obspy.Stream(traces=[tr33])
            if tr22[0].stats.channel == "HHN" and tr33[0].stats.channel == "HHE":
                tr1 = tr11
                tr2 = tr22
                tr3 = tr33
            elif tr22[0].stats.channel == "HH1" and tr33[0].stats.channel == "HH2":
                trZ12 = tr11+tr22+tr33
                try:
                    invZNE = obspy.read_inventory(resp_directory+'STXML.'+fsp[0]+"."+fsp[1]+"."+fsp[2]+"*")
                    trZNE = trZ12.copy()._rotate_to_zne(invZNE, components=('Z12',))  # rotate to ZNE (one-element tuple)
                except Exception:
                    continue
                for tr in trZNE:
                    if tr.stats.channel.endswith('Z'):
                        tr1 = tr.copy()
                        tr1.stats.distance = dist_deg
                        tr1.stats.backazimuth = bazi
                        tr1.stats["coordinates"] = {}
                        tr1.stats["coordinates"]["latitude"] = lat
                        tr1.stats["coordinates"]["longitude"] = lon
                        tr1.stats["coordinates"]["elevation"] = elv
                    elif tr.stats.channel.endswith('N'):
                        tr2 = tr.copy()
                        tr2.stats.distance = dist_deg
                        tr2.stats.backazimuth = bazi
                        tr2.stats["coordinates"] = {}
                        tr2.stats["coordinates"]["latitude"] = lat
                        tr2.stats["coordinates"]["longitude"] = lon
                        tr2.stats["coordinates"]["elevation"] = elv
                    elif tr.stats.channel.endswith('E'):
                        tr3 = tr.copy()
                        tr3.stats.distance = dist_deg
                        tr3.stats.backazimuth = bazi
                        tr3.stats["coordinates"] = {}
                        tr3.stats["coordinates"]["latitude"] = lat
                        tr3.stats["coordinates"]["longitude"] = lon
                        tr3.stats["coordinates"]["elevation"] = elv
                Z12_count += 1
            else:
                continue
        # Read all OBS channels
        elif file.endswith('CHZ'):
            if not read_obs:  # If False, don't read OBS stations
                continue
            fsp = file.split(".")
            try:
                file_ch = fsp[0]+"."+fsp[1]+"."+fsp[2]+".CH*"
                ch = obspy.read(prodata_directory+file_ch, starttime=stime, endtime=etime)  # read as a stream
                inv_sta = obspy.read_inventory(resp_directory+'STXML.'+file_ch)             # read all channel inventories
                ch_zne = ch.copy()._rotate_to_zne(inv_sta, components=('Z12',))             # rotate to ZNE
                # Read the station location
                lat = inv_sta[0][0].latitude
                lon = inv_sta[0][0].longitude
                elv = inv_sta[0][0].elevation / 1000.  # in km
                dist, azi, bazi = gps2dist_azimuth(lat_event, lon_event, lat, lon)
                dist_deg = kilometers2degrees(dist/1000.)  # in degrees
                # Assign the rotated channels back to separate traces
                for tr in ch_zne:
                    if tr.stats.channel.endswith('Z'):
                        tr1 = tr.copy()
                        tr1.stats.channel = "CHZ"
                        tr1.stats["coordinates"] = {}
                        tr1.stats["coordinates"]["latitude"] = lat
                        tr1.stats["coordinates"]["longitude"] = lon
                        tr1.stats["coordinates"]["elevation"] = elv
                    elif tr.stats.channel.endswith('N'):
                        tr2 = tr.copy()
                        tr2.stats.channel = "CHN"
                        tr2.stats["coordinates"] = {}
                        tr2.stats["coordinates"]["latitude"] = lat
                        tr2.stats["coordinates"]["longitude"] = lon
                        tr2.stats["coordinates"]["elevation"] = elv
                    elif tr.stats.channel.endswith('E'):
                        tr3 = tr.copy()
                        tr3.stats.channel = "CHE"
                        tr3.stats["coordinates"] = {}
                        tr3.stats["coordinates"]["latitude"] = lat
                        tr3.stats["coordinates"]["longitude"] = lon
                        tr3.stats["coordinates"]["elevation"] = elv
                obs += 1  # Count OBS stations
            except Exception:  # If the files are not present, skip this station
                continue
        else:
            continue
        # Remove stations below 41N and beyond 52N
        if lat <= 41. or lat > 52.:
            continue
        # Station dictionary
        arraydic = {}
        arraydic["net_sta"] = fsp[0]
        arraydic["name_sta"] = fsp[0]+"."+fsp[1]+"."+fsp[2]
        arraydic["lat_sta"] = lat
        arraydic["lon_sta"] = lon
        arraydic["elv_sta"] = elv
        arraydic["dist_sta"] = dist_deg
        arraydic["bazi_sta"] = bazi
        arraydic["azi_sta"] = azi
        arraydic["tr"] = tr1
        arraydic["tr_N"] = tr2
        arraydic["tr_E"] = tr3
        data_dict_list.append(arraydic)
    # Show the number of OBS and Z12 stations
    if obs != 0:
        print('Read ' + str(obs) + ' OBS stations.')
    if Z12_count != 0:
        print('Read ' + str(Z12_count) + ' Z12 stations.')
    # Sort the station list by distance
    print('Sorting the list of dictionaries by distance in degrees...')
    data_dict_list.sort(key=itemgetter('dist_sta'))
    for i in range(len(data_dict_list)):
        # Store data in arrays
        st1 += data_dict_list[i]["tr"]
        st2 += data_dict_list[i]["tr_N"]
        st3 += data_dict_list[i]["tr_E"]
        lat_sta = np.append(lat_sta, data_dict_list[i]["lat_sta"])
        lon_sta = np.append(lon_sta, data_dict_list[i]["lon_sta"])
        elv_sta = np.append(elv_sta, data_dict_list[i]["elv_sta"])
        dist_sta = np.append(dist_sta, data_dict_list[i]["dist_sta"])
        bazi_sta = np.append(bazi_sta, data_dict_list[i]["bazi_sta"])
        azi_sta = np.append(azi_sta, data_dict_list[i]["azi_sta"])
        net_sta = np.append(net_sta, data_dict_list[i]["net_sta"])
        name_sta = np.append(name_sta, data_dict_list[i]["name_sta"])
    # Store the raw data
    print('Storing raw data...')
    data_inv_dic = {}
    # Inventory info
    data_inv_dic["net_sta"] = net_sta
    data_inv_dic["name_sta"] = name_sta
    data_inv_dic["lat_sta"] = lat_sta
    data_inv_dic["lon_sta"] = lon_sta
    data_inv_dic["elv_sta"] = elv_sta
    data_inv_dic["dist_sta"] = dist_sta
    data_inv_dic["bazi_sta"] = bazi_sta
    data_inv_dic["azi_sta"] = azi_sta
    # Streams
    data_inv_dic["st"] = st1
    data_inv_dic["st_N"] = st2
    data_inv_dic["st_E"] = st3
    return data_inv_dic
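# Usage sketch (hypothetical directories; reads the first two hours after
# the origin time and skips OBS stations):
#   data_inv_dic = read_data_inventory("./processed/", "./resp/", event_dic,
#                                      tstart=0, tend=7200, read_obs=False)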
def Normalize(data_inv_dic, event_dic, f1, f2, start, end, decimate_fc=2, threshold=None, plot_Xsec=False, plot_ZeroX=False):
    """
    Filter and normalize streams for one single event
    :param data_inv_dic: data and inventory dictionary
    :param event_dic: event dictionary
    :param f1: lower frequency
    :param f2: higher frequency
    :param start: start time of the stream
    :param end: end time of the stream
    :param decimate_fc: downsampling factor, default: 2
    :param threshold: STD threshold for removing noisy traces; if None, a magnitude-dependent default is used
    :param plot_Xsec: option for plotting cross-sections
    :param plot_ZeroX: option for storing zero-crossing data
    :return: dictionary of processed data and dictionary of stream info
    """
    gmv1 = np.array([])  # Z
    gmv2 = np.array([])  # N
    gmv3 = np.array([])  # E
    gmv4 = np.array([])  # R
    gmv5 = np.array([])  # T
    tr1_std = np.array([])
    tr2_std = np.array([])
    tr3_std = np.array([])
    st1 = data_inv_dic["st"]
    st2 = data_inv_dic["st_N"]
    st3 = data_inv_dic["st_E"]
    lat_sta = data_inv_dic["lat_sta"]
    lon_sta = data_inv_dic["lon_sta"]
    name_sta = data_inv_dic["name_sta"]
    dist_sta = data_inv_dic["dist_sta"]
    bazi_sta = data_inv_dic["bazi_sta"]
    azi_sta = data_inv_dic["azi_sta"]
    region = event_dic['region']
    time_event_sec = event_dic['time_sec']
    mag = event_dic['mag']
    # Starting and ending time after OT
    if type(start) is obspy.core.utcdatetime.UTCDateTime and type(end) is obspy.core.utcdatetime.UTCDateTime:
        starttime = start
        endtime = end
    else:
        starttime = obspy.UTCDateTime(time_event_sec + start)
        endtime = obspy.UTCDateTime(time_event_sec + end)
    # Filter and downsample streams
    print('Filtering between %.2f s and %.2f s...' % (1/f2, 1/f1))
    st_all_raw = st1+st2+st3
    st_all_f = filter_streams(st_all_raw, f1, f2)
    print(st_all_f)
    print('Trimming traces from '+str(start)+' s to '+str(end)+' s...')
    # Trim the filtered traces
    try:
        st_all_f.interpolate(sampling_rate=st_all_f[0].stats.sampling_rate, starttime=starttime)
        print('Traces are interpolated.')
    except Exception as e:
        print(e)
    st_all = st_all_f.trim(starttime, endtime, pad=True, fill_value=0)
    # Downsample data by an integer factor
    if decimate_fc:
        print("Traces will be downsampled from "+str(st_all[0].stats.sampling_rate)+" Hz to "+str(st_all[0].stats.sampling_rate/decimate_fc)+" Hz.")
        st_all.decimate(decimate_fc)
    else:
        print("Traces will NOT be downsampled.")
    print(st_all)
    st = st_all.select(channel="??Z")
    st_N = st_all.select(channel="??N")
    st_E = st_all.select(channel="??E")
    timestep = st[0].stats.delta             # sample spacing in seconds
    nt = st[0].stats.npts                    # total number of samples
    sample_rate = st[0].stats.sampling_rate  # sampling rate in Hz
    time_st = st[0].times()                  # stream time
    # Normalize each trace by its max abs value and store it in a matrix
    print('Normalizing traces by the max abs value...')
    for tr1, tr2, tr3, bazi in zip(st, st_N, st_E, bazi_sta):
        if tr1.stats.station != tr2.stats.station or tr1.stats.station != tr3.stats.station or tr2.stats.station != tr3.stats.station:
            print(tr1)
            print("Not the same station")
        # Rotate HHN/HHE to Radial/Transverse before normalizing
        try:
            tr4, tr5 = rotate_ne_rt(tr2.copy().data, tr3.copy().data, bazi)  # rotate N and E components to R and T
        except Exception as e:
            print(e)
            print(tr2)
            print(tr3)
            continue
        if plot_Xsec:
            tr1_new = tr1.data/max(maxabs(tr1.data), maxabs(tr2.data), maxabs(tr3.data))
            tr2_new = tr2.data/max(maxabs(tr2.data), maxabs(tr3.data))
            tr3_new = tr3.data/max(maxabs(tr2.data), maxabs(tr3.data))
            tr4_new = tr4/max(maxabs(tr4), maxabs(tr5))
            tr5_new = tr5/max(maxabs(tr4), maxabs(tr5))
        else:
            tr1_new = tr1.data/maxabs(tr1.data)
            tr2_new = tr2.data/max(maxabs(tr2.data), maxabs(tr3.data))
            tr3_new = tr3.data/max(maxabs(tr2.data), maxabs(tr3.data))
            tr4_new = tr4/maxabs(tr4)
            tr5_new = tr5/maxabs(tr5)
        tr1_std = np.append(tr1_std, tr1_new.std())
        tr2_std = np.append(tr2_std, (tr2.data/maxabs(tr2.data)).std())
        tr3_std = np.append(tr3_std, (tr3.data/maxabs(tr3.data)).std())
        gmv1 = np.append(gmv1, tr1_new)
        gmv2 = np.append(gmv2, tr2_new)
        gmv3 = np.append(gmv3, tr3_new)
        gmv4 = np.append(gmv4, tr4_new)
        gmv5 = np.append(gmv5, tr5_new)
    # Reshape into (stations x samples) and replace NaNs with zeros
    gmv1 = np.reshape(gmv1, (len(st), len(tr1_new)))
    gmv1[np.isnan(gmv1)] = 0
    gmv2 = np.reshape(gmv2, (len(st), len(tr1_new)))
    gmv2[np.isnan(gmv2)] = 0
    gmv3 = np.reshape(gmv3, (len(st), len(tr1_new)))
    gmv3[np.isnan(gmv3)] = 0
    gmv4 = np.reshape(gmv4, (len(st), len(tr1_new)))
    gmv4[np.isnan(gmv4)] = 0
    gmv5 = np.reshape(gmv5, (len(st), len(tr1_new)))
    gmv5[np.isnan(gmv5)] = 0
    # Remove noisy stations
    print('Removing noisy stations...')
    # Remove traces with an STD above the threshold
    if threshold is None:
        threshold = 0.25 if mag >= 7.0 else 0.3  # magnitude-dependent default
    goodstations = ((tr1_std <= threshold) & (tr2_std <= threshold) & (tr3_std <= threshold))  # mask of good stations
    # Store good stations
    normalized_dic = {}
    normalized_dic["name_sta"] = name_sta[goodstations]
    normalized_dic["lat_sta"] = lat_sta[goodstations]
    normalized_dic["lon_sta"] = lon_sta[goodstations]
    normalized_dic["dist_sta"] = dist_sta[goodstations]
    normalized_dic["bazi_sta"] = bazi_sta[goodstations]
    normalized_dic["azi_sta"] = azi_sta[goodstations]
    # Store good streams
    normalized_dic["GMV_Z"] = gmv1[goodstations]
    normalized_dic["GMV_N"] = gmv2[goodstations]
    normalized_dic["GMV_E"] = gmv3[goodstations]
    normalized_dic["GMV_R"] = gmv4[goodstations]
    normalized_dic["GMV_T"] = gmv5[goodstations]
    if plot_ZeroX:  # Zero crossings
        print("Zero-crossing data is saved.")
        gmv_ZeroX = gmv1[goodstations]
        gmv_ZeroX[abs(gmv_ZeroX) <= 0.01] = 1  # keep only +/- 1% around zero
        gmv_ZeroX[gmv_ZeroX != 1] = 0          # turn off the rest
        normalized_dic["GMV_ZeroX"] = gmv_ZeroX
    print('Data processing for one single event DONE!')
    print('The total number of stations of the '+region+' earthquake: '+str(len(name_sta[goodstations])))
    return normalized_dic, dict(start=start, end=end, timestep=timestep, nt=nt, sample_rate=sample_rate, region=region, time_st=time_st)
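# Usage sketch (an assumed 100-200 s surface-wave band over the first two hours):
#   GMV, st_info = Normalize(data_inv_dic, event_dic, 1/200., 1/100., 0, 7200)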
def station_phases(GMV, station, event_dic, model, phases):
    """
    Get single-station info and arrivals
    :param GMV: processed data dictionary
    :param station: station name
    :param event_dic: event dictionary
    :param model: earth model
    :param phases: phases to plot on seismograms and the ray-path plot
    :return: dictionary of station info and arrivals
    """
    # Select a station by name
    name_sta = GMV["name_sta"].tolist()
    dist_sta = GMV["dist_sta"]
    lat_sta = GMV["lat_sta"]
    lon_sta = GMV["lon_sta"]
    bazi_sta = GMV["bazi_sta"]
    dep_event = event_dic['depth']
    print("Reading station "+station+"...")
    try:
        sta_index = name_sta.index(station)  # station index
    except ValueError:
        print("Station not found. Selecting the station in the middle of the list instead...")
        station = name_sta[int(len(name_sta)/2)]  # fall back to the middle station
        sta_index = name_sta.index(station)       # station index
    print("Getting "+station+" arrival times...")
    # Phases and their travel times
    model_ev = TauPyModel(model=model)
    arr = model_ev.get_ray_paths(source_depth_in_km=dep_event, distance_in_degree=dist_sta[sta_index], phase_list=phases)
    print("Station "+station+" read!")
    return dict(arr=arr,
                sta_index=sta_index,
                sta_name=station,
                sta_dist=dist_sta[sta_index],
                sta_lat=lat_sta[sta_index],
                sta_lon=lon_sta[sta_index],
                sta_bazi=bazi_sta[sta_index])
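# Usage sketch (the station name is hypothetical; the function falls back to
# the middle station if it is not found):
#   sta_dic = station_phases(GMV, "CH.GRIMS.", event_dic, "iasp91",
#                            ["P", "S", "4kmps"])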
def get_slowness(model, dist_deg_array, event_dic, phase):
    """
    Get slownesses of a particular phase across an array
    :param model: earth model
    :param dist_deg_array: station distances of an array in degrees
    :param event_dic: event dictionary
    :param phase: list of selected phases
    :return: slownesses of the phase at each station distance
    """
    rayp_deg_array = np.array([])
    model_ev = TauPyModel(model=model)
    dep_event = event_dic['depth']
    for dist_deg in dist_deg_array:
        arr_path = model_ev.get_ray_paths(source_depth_in_km=dep_event, distance_in_degree=dist_deg, phase_list=phase)
        for i in range(len(arr_path)):
            rayp = arr_path[i].ray_param    # ray parameter in [s/rad]
            rayp_deg = rayp * np.pi / 180   # ray parameter in [s/deg]
            rayp_deg_array = np.append(rayp_deg_array, rayp_deg)
    return rayp_deg_array
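# Example: slowness of PP at every station of the processed array:
#   slo_PP = get_slowness("iasp91", GMV["dist_sta"], event_dic, ["PP"])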
def select_Xsec(GMV, mode="azi"):
    """
    Select stations for plotting a cross-section
    :param GMV: processed data dictionary
    :param mode: select stations within a fixed longitude band ("lat"), around
                 the median backazimuth ("bazi") or the median azimuth ("azi"), default: "azi"
    :return: cross-section dictionary
    """
    if mode == "lat":
        lon_sta_new = GMV["lon_sta"]
        cross_sec = (lon_sta_new >= 10.5) & (lon_sta_new <= 11.5)  # stations within this longitude range
    elif mode == "bazi":
        bazi_sta_new = GMV["bazi_sta"]
        cross_sec = (bazi_sta_new >= np.median(bazi_sta_new)-0.5) & (bazi_sta_new <= np.median(bazi_sta_new)+0.5)  # stations within this backazimuth range
    else:
        azi_sta_new = GMV["azi_sta"]
        cross_sec = (azi_sta_new >= np.median(azi_sta_new)-0.5) & (azi_sta_new <= np.median(azi_sta_new)+0.5)  # stations within this azimuth range
    if GMV["lat_sta"][cross_sec][0] > 46.:
        facenorth = True
        print("The cross-section is facing North-ish.")
    else:
        facenorth = False
        print("The cross-section is facing South-ish.")
    bazi_Xsec = np.median(GMV["bazi_sta"][cross_sec])
    if 0. <= bazi_Xsec <= 180.:
        bazi_Xsec_r = bazi_Xsec + 180.
    else:
        bazi_Xsec_r = bazi_Xsec - 180.
    leftDir = degrees_to_cardinal(bazi_Xsec)
    rightDir = degrees_to_cardinal(bazi_Xsec_r)
    # Find the center station among the selected stations
    center_station = GMV["name_sta"][cross_sec][int(len(GMV["name_sta"][cross_sec])/2)+1]
    # Distance labels for the cross-section
    dist_sta_Xsec = GMV["dist_sta"][cross_sec]  # distances of the selected stations
    dist_range = np.arange(int(min(dist_sta_Xsec)-2), int(max(dist_sta_Xsec)+2), 2)
    dist_label_Xsec = []
    for i in dist_range:
        dist_label_Xsec.append(str(i)+'\N{DEGREE SIGN}')
    return dict(cross_sec=cross_sec,
                facenorth=facenorth,
                leftDir=leftDir,
                rightDir=rightDir,
                center_station=center_station,
                dist_sta_Xsec=dist_sta_Xsec,
                dist_label_Xsec=dist_label_Xsec)
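# Example: pick the cross-section stations along the median azimuth:
#   Xsec_dic = select_Xsec(GMV, mode="azi")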
def select_FK(event_dic, data_dic, start, end, model, freq, thechosenone, decimate_fc=2, radius=100., threshold=0.25, station_list=None, plot_CH_only=False):
    """
    Select stations for FK analysis (vertical channels only)
    :param event_dic: event dictionary
    :param data_dic: raw data and inventory dictionary
    :param start: start of stream
    :param end: end of stream
    :param model: earth model
    :param freq: frequency bands, either (f11, f22) or (f11, f22, f111, f222)
    :param thechosenone: the center station of the subarray for FK analysis
    :param decimate_fc: downsampling factor
    :param radius: maximum distance in km from the center station
    :param threshold: STD threshold for rejecting noisy traces
    :param station_list: optional explicit list of station names to use
    :param plot_CH_only: use the Swiss network (CH) as the subarray
    :return: FK dictionary with subarray streams and info
    """
    st_new = obspy.Stream()
    baz_rec = []
    lat_rec = []
    lon_rec = []
    elv_rec = []
    dist_rec = []
    if type(start) is obspy.core.utcdatetime.UTCDateTime and type(end) is obspy.core.utcdatetime.UTCDateTime:
        starttime_fk = start
        endtime_fk = end
    else:
        starttime_fk = obspy.UTCDateTime(event_dic["time_sec"] + start)
        endtime_fk = obspy.UTCDateTime(event_dic["time_sec"] + end)
    for i in range(len(data_dic["net_sta"])):
        if plot_CH_only:
            if data_dic["net_sta"][i] == "CH":
                if (data_dic["st"][i].data/maxabs(data_dic["st"][i].data)).std() > threshold:
                    continue
                else:
                    st_new += data_dic["st"][i]  # HHZ
        elif station_list:
            if data_dic["name_sta"][i] in station_list:
                if (data_dic["st"][i].data/maxabs(data_dic["st"][i].data)).std() > threshold:
                    continue
                else:
                    st_new += data_dic["st"][i]  # HHZ
                    baz_rec.append(data_dic["bazi_sta"][i])
                    lat_rec.append(data_dic["lat_sta"][i])
                    lon_rec.append(data_dic["lon_sta"][i])
                    elv_rec.append(data_dic["elv_sta"][i])
                    dist_rec.append(data_dic["dist_sta"][i])
        else:
            dist_fk, azi_fk, bazi_fk = gps2dist_azimuth(thechosenone["sta_lat"], thechosenone["sta_lon"], data_dic["lat_sta"][i], data_dic["lon_sta"][i])
            dist_fk_km = dist_fk/1000.
            if (dist_fk_km > 20. and dist_fk_km < radius) or dist_fk_km == 0.:  # distance from the center station
                if (data_dic["st"][i].data/maxabs(data_dic["st"][i].data)).std() > threshold:
                    continue
                else:
                    st_new += data_dic["st"][i]  # HHZ
                    baz_rec.append(data_dic["bazi_sta"][i])
                    lat_rec.append(data_dic["lat_sta"][i])
                    lon_rec.append(data_dic["lon_sta"][i])
                    elv_rec.append(data_dic["elv_sta"][i])
                    dist_rec.append(data_dic["dist_sta"][i])
            else:
                continue
    # Get network locations/coords for FK analysis
    if plot_CH_only:
        net = "CH"
        baz_rec = data_dic["bazi_sta"][data_dic["net_sta"] == net]
        lat_rec = data_dic["lat_sta"][data_dic["net_sta"] == net]
        lon_rec = data_dic["lon_sta"][data_dic["net_sta"] == net]
        elv_rec = data_dic["elv_sta"][data_dic["net_sta"] == net]
        dist_rec = data_dic["dist_sta"][data_dic["net_sta"] == net]
        lats_rec = [min(lat_rec), max(lat_rec), max(lat_rec), min(lat_rec)]  # lower left, upper left, upper right, lower right
        lons_rec = [min(lon_rec), min(lon_rec), max(lon_rec), max(lon_rec)]
    else:
        lats_rec = [min(lat_rec), max(lat_rec), max(lat_rec), min(lat_rec)]  # lower left, upper left, upper right, lower right
        lons_rec = [min(lon_rec), min(lon_rec), max(lon_rec), max(lon_rec)]
    # Check the array slowness
    phase_test = ["4kmps"]
    array_sl = get_slowness(model, dist_rec, event_dic, phase_test)
    array_sl_center = get_slowness(model, np.array([thechosenone["sta_dist"]]), event_dic, phase_test)
    # print(array_sl)
    if len(array_sl) > 0:
        for ph in phase_test:
            # print("Median slowness of "+ph+" at the sub-array: %.2f" % np.median(array_sl))
            # print("Mean slowness of "+ph+" at the sub-array: %.2f" % np.mean(array_sl))
            print("The slowness of "+ph+" at the center of the sub-array: %.2f s/deg" % np.mean(array_sl_center))
    else:
        print("Selected phases are not present in this event.")
    if len(freq) == 2:
        f11, f22 = freq
        st_fk1 = filter_streams(st_new.copy(), f11, f22)  # filter for FK analysis
        st_fk1.trim(starttime_fk, endtime_fk, pad=True, fill_value=0)
        st_fk1.decimate(decimate_fc)  # downsample data by an integer factor
        st_fk2 = obspy.Stream()       # empty stream to pass in the dictionary
    else:
        f11, f22, f111, f222 = freq
        st_fk1 = filter_streams(st_new.copy(), f11, f22)  # filter for FK analysis 1
        st_fk1.trim(starttime_fk, endtime_fk, pad=True, fill_value=0)
        st_fk1.decimate(decimate_fc)  # downsample data by an integer factor
        st_fk2 = filter_streams(st_new.copy(), f111, f222)  # filter for FK analysis 2
        st_fk2.trim(starttime_fk, endtime_fk, pad=True, fill_value=0)
        st_fk2.decimate(decimate_fc)  # downsample data by an integer factor
    print('The total number of stations used for FK analysis: ' + str(len(st_fk1)))
    return dict(baz_rec=baz_rec,
                elv_rec=elv_rec,
                dist_rec=dist_rec,
                lat_rec=lat_rec,
                lon_rec=lon_rec,
                lats_rec=lats_rec,
                lons_rec=lons_rec,
                center_dist=thechosenone["sta_dist"],
                st_fk1=st_fk1,
                st_fk2=st_fk2,
                starttime_fk=starttime_fk,
                endtime_fk=endtime_fk)
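# Usage sketch (an assumed 20-50 s band around the station chosen above):
#   fk_dic = select_FK(event_dic, data_inv_dic, 0, 7200, "iasp91",
#                      (1/50., 1/20.), sta_dic, radius=100.)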
# ==================================
# %% Plotting functions
def plot_ARF(subarray_dict, klim):
    """
    Plot the array response function
    :param subarray_dict: subarray dictionary (from select_FK)
    :param klim: wavenumber limit
    """
    from obspy.signal.array_analysis import array_transff_wavenumber
    # Set limits for the wavenumber differences to analyze
    kxmin = -klim
    kxmax = klim
    kymin = -klim
    kymax = klim
    kstep = klim / 100.
    # Compute the transfer function as a function of wavenumber difference
    coords = []
    for (x, y, e) in zip(subarray_dict["lon_rec"], subarray_dict["lat_rec"], subarray_dict["elv_rec"]):
        coords.append([x, y, e])
    transff = array_transff_wavenumber(np.array(coords), klim, kstep, coordsys='lonlat')
    # Plot the array response function
    plt.pcolor(np.arange(kxmin, kxmax + kstep * 1.1, kstep) - kstep / 2.,
               np.arange(kymin, kymax + kstep * 1.1, kstep) - kstep / 2.,
               transff.T, cmap=plt.cm.get_cmap('hot_r'))
    plt.colorbar()
    plt.clim(vmin=0., vmax=0.1)
    plt.xlim(kxmin, kxmax)
    plt.xlabel("wavenumber")
    plt.ylim(kymin, kymax)
    plt.ylabel("wavenumber")
    plt.title("Array Response Function")
    plt.show()
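# Example (the wavenumber limit below is a hypothetical choice):
#   plot_ARF(fk_dic, klim=0.3)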
def plot_dispersion_curve(data_dic, event_dic, station, chan, start_dc, end_dc, decimate_fc=2, save=None):
    """
    Plot the dispersion curve of a single station
    :param data_dic: raw data and inventory dictionary
    :param event_dic: event dictionary
    :param station: station name
    :param chan: channel, "Z", "R" or "T"
    :param start_dc: starting time
    :param end_dc: ending time
    :param decimate_fc: downsampling factor, default: 2
    :param save: figure directory; default: None, i.e. the figure is not saved
    """
    from scipy.signal import hilbert
    # Select a station by name
    name_sta = data_dic["name_sta"].tolist()
    print("Plotting dispersion curve for station "+station+"...")
    try:
        thechosenone = name_sta.index(station)  # station index
    except ValueError:
        print("Station not found. Selecting the station in the middle of the list instead...")
        station = name_sta[int(len(name_sta)/2)]  # fall back to the middle station
        thechosenone = name_sta.index(station)    # station index
    fig, ax = plt.subplots(1, 1, figsize=(7, 9))
    iplot = 0
    if chan == "R":
        st_select = "st_R"
    elif chan == "T":
        st_select = "st_T"
    else:
        st_select = "st"
    time_event_sec = event_dic['time_sec']
    if type(start_dc) is obspy.core.utcdatetime.UTCDateTime and type(end_dc) is obspy.core.utcdatetime.UTCDateTime:
        starttime_dc = start_dc
        endtime_dc = end_dc
    else:
        starttime_dc = obspy.UTCDateTime(time_event_sec + start_dc)
        endtime_dc = obspy.UTCDateTime(time_event_sec + end_dc)
    st = data_dic[st_select][thechosenone]
    st_work = st.copy()
    st_work.detrend()
    st_work.trim(starttime_dc, endtime_dc, pad=True, fill_value=0)
    st_work.decimate(decimate_fc)  # downsample data by an integer factor
    data = (st_work.data / maxabs(st_work.data)) * 0.6
    ax.plot(st_work.times() + start_dc, data, 'k', lw=0.65, label=name_sta[thechosenone]+chan)
    ax.text(start_dc+50, iplot + 0.25, 'Broadband', verticalalignment='center', horizontalalignment='left')
    # Filter around logarithmically spaced center periods (8 s to ~128 s)
    # and plot each narrow-band seismogram with its envelope
    for pcenter in 2**np.arange(3, 8, 0.25):
        iplot += 1
        st_work_filter = st.copy()
        st_work_filter.detrend()
        freqmin = 1./pcenter / np.sqrt(2)
        freqmax = 1./pcenter * np.sqrt(2)
        st_work_filter.filter('bandpass', freqmin=freqmin, freqmax=freqmax, zerophase=True)
        st_work_filter.trim(starttime_dc, endtime_dc, pad=True, fill_value=0)
        st_work_filter.decimate(decimate_fc)  # downsample data by an integer factor
        data_filter = (st_work_filter.data/maxabs(st_work_filter.data)) * 0.5
        ax.plot(st_work_filter.times() + start_dc, data_filter + iplot, 'darkgrey')
        ax.plot(st_work_filter.times() + start_dc, abs(hilbert(data_filter)) + iplot, 'red')
        ax.text(start_dc+50, iplot + 0.25, '%d s ' % pcenter, verticalalignment='center', horizontalalignment='left')
    ax.text(start_dc+40, iplot + 0.75, 'Period', verticalalignment='center', horizontalalignment='left')
    ax.set_xlim(start_dc, end_dc)
    ax.set_ylim(-1, iplot+1.2)