/stream.py

https://github.com/trichter/sito · Python · 1511 lines · 1413 code · 19 blank · 79 comment

# by TR
from copy import copy
from obspy.core import AttribDict, UTCDateTime, Stream as ObsPyStream
from obspy.core.util import gps2DistAzimuth
from sito import util, rf, imaging
from sito.trace import Trace
from sito.util import add_doc, cosTaper, calculate
from sito.xcorr import timeNorm, spectralWhitening
import glob
import logging
import numpy as np
import obspy.core  # @UnresolvedImport
import obspy.signal  # @UnresolvedImport
import os.path

log = logging.getLogger(__name__)


def _reprSortedStats(stats):
    """
    Return sorted string representation of Stats object.
    """
    dict_copy = copy(stats)
    priorized_keys = ['network', 'station', 'location', 'channel',
                      'starttime', 'endtime', 'sampling_rate', 'delta',
                      'npts', 'calib', 'azi', 'inci', 'dist', 'slowness',
                      'filter', 'ponset', 'event']
    unwanted_keys = ['_format']
    for key in unwanted_keys:
        if key in dict_copy.keys():
            dict_copy.pop(key)
    # list the prioritized keys first, then all remaining keys
    head = ''.join(["%r: %r, " % (key, dict_copy.pop(key))
                    for key in priorized_keys if key in dict_copy.keys()])
    head += ''.join(["%r: %r, " % (key, dict_copy.pop(key))
                     for key in dict_copy.keys()])
    return 'obspy.core.Stats({%s})' % head[:-2]

@add_doc(obspy.core.read)
def read(pathname_or_url, *args, **kwargs):  # format=None, headonly=False,
    """
    Read waveform files into a Stream object.

    Doc of obspy.core.read:
    """
    ignore_starttime = kwargs.pop('ignore_starttime', False)
    pathname_or_url, ext = os.path.splitext(pathname_or_url)
    ms = obspy.core.read(pathname_or_url + ext, *args, **kwargs)
    content = []
    hatfiles = glob.glob(pathname_or_url + '.HAT')
    hatfiles.sort()
    for hatfile in hatfiles:
        with open(hatfile, 'r') as file_:
            file_content = file_.read().strip('\n')
        file_content = file_content.replace('nan', 'np.nan')
        if file_content != '':
            content.extend(file_content.split('\n'))
    if len(content) > 0 and len(content) == len(ms):
        for i in range(len(ms)):
            try:
                st = eval(content[i])  # quite slow and of course very bad
            except NameError as ex:
                if 'masked' in content[i]:
                    content[i] = content[i].replace('masked', '-1')
                    st = eval(content[i])
                else:
                    raise ex
            if (ms[i].stats.get('station') and ms[i].stats.station != st.station or
                    ms[i].stats.get('channel') and ms[i].stats.channel != st.channel or
                    not ignore_starttime and ms[i].stats.get('starttime') and
                    abs(ms[i].stats.starttime - st.starttime) > 1):
                raise ValueError('Error while reading stats from HAT file')
            ms[i].stats.update(st)
    elif ms[0].stats._format == 'Q':
        from sito.util.main import SH_OPY_EVENT_IDX, SH_OPY_IDX
        for tr in ms:
            tr.stats['event'] = AttribDict()
            for key_sh, key_opy in SH_OPY_EVENT_IDX.iteritems():
                if key_sh in tr.stats.sh.keys():
                    tr.stats.event[key_opy] = tr.stats.sh[key_sh]
                    del tr.stats.sh[key_sh]
            for key_sh, key_opy in SH_OPY_IDX.iteritems():
                if key_sh in tr.stats.sh.keys():
                    tr.stats[key_opy] = tr.stats.sh[key_sh]
                    del tr.stats.sh[key_sh]
            if len(tr.stats['sh']) == 0:
                del tr.stats['sh']
            if 'filter' in tr.stats.keys():
                # e.g. 'DT;LP:max:25.00;DS by 2;'
                tr.stats.filter = tr.stats.filter.translate(None, ' |;:')
                tr.stats.filter = (tr.stats.filter.replace('DT', 'Dt')
                                   .replace('max', '').replace('min', '')
                                   .replace('by', ''))
            if 'filter' not in tr.stats.keys():
                tr.stats['filter'] = ''
            if 'comment' in tr.stats.keys():
                tr.stats.filter += tr.stats.comment
                del tr.stats['comment']
            if 'mark' in tr.stats.keys():
                tr.stats.mark = bool(tr.stats.mark)
    ms = Stream(ms)
    # write some event ids if not given
    if len(ms) > 0 and ms[0].stats.get('event') is not None and ms[0].stats.event.get('id') is None:
        ms.setEventIDs()
    if len(ms) > 0 and ms[0].stats.get('filter') is None:
        ms.setHI('filter', '')
    if len(ms) > 0 and ms[0].stats.is_fft:
        ms2 = obspy.core.read(pathname_or_url + '_IM' + ext, *args, **kwargs)
        for i, tr in enumerate(ms):
            tr.data = tr.data + 1j * ms2[i].data
    log.info('Load stream %s with %s traces from %s' % (ms.hash, len(ms), pathname_or_url))
    return ms
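
# Usage sketch (not part of the original module; the path is hypothetical):
# read() loads waveforms like obspy.core.read and additionally restores the
# extended header from an accompanying .HAT file if one exists.
#
# >>> from sito.stream import read
# >>> ms = read('/data/receiver/TEST.QHD')  # also picks up /data/receiver/TEST.HAT
# >>> print len(ms), ms[0].stats.filter
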
class Stream(ObsPyStream):
    """
    Class derived from obspy.core.Stream with some additional functionality.
    """
    @property
    def hash(self):  # @ReservedAssignment
        return '%03d' % (hash(repr(self)) % 100)

    # @classmethod
    # def read(cls, *args, **kwargs):
    #     return read(*args, **kwargs)

    # @classmethod
    # def stroptions(cls, *args, **kwargs):
    #     return ' '.join(['%s' % i for i in args] +
    #                     ['%s=%s' % (i, j) for i, j in kwargs.iteritems()])

    @add_doc(obspy.core.Stream.write)
    def write(self, filename, format_, Qheader=False, **kwargs):
        """
        Extend obspy.core.Stream.write with logging and writing of a special header file.

        Doc of obspy.core.Stream.write:
        """
        util.checkDir(filename)
        if np.any([isinstance(tr.data, np.ma.MaskedArray) for tr in self]):
            log.debug('Filling some masked arrays in stream object before writing.')
            for tr in self:
                tr.data = np.ma.filled(tr.data)
        if Qheader and format_ == 'Q':
            from sito.util.main import SH_OPY_EVENT_IDX, SH_OPY_IDX
            for tr in self:
                if 'sh' not in tr.stats.keys():
                    tr.stats['sh'] = AttribDict({})
                if 'event' in tr.stats.keys():
                    for key_sh, key_opy in SH_OPY_EVENT_IDX.iteritems():
                        if key_opy in tr.stats.event.keys():
                            tr.stats['sh'][key_sh] = tr.stats.event[key_opy]
                for key_sh, key_opy in SH_OPY_IDX.iteritems():
                    if key_opy in tr.stats.keys():
                        tr.stats['sh'][key_sh] = tr.stats[key_opy]
                if 'filter' in tr.stats.keys() and len(tr.stats.filter) > 78:
                    tr.stats.sh.FILTER = tr.stats.filter[:78]
                    tr.stats.sh.COMMENT = tr.stats.filter[78:]
                if 'mark' in tr.stats.keys():
                    tr.stats.sh.MARK = int(tr.stats.mark)
                    tr.stats.sh['FILTER'] = ''
        elif format_ == 'SAC':
            for tr in self:
                if 'sac' not in tr.stats.keys():
                    tr.stats['sac'] = AttribDict({})
                # save distance in SAC header
                if 'dist' in tr.stats:
                    tr.stats.sac.dist = tr.stats.dist
        if len(self) > 0:
            # from sito import ipshell
            # ipshell()
            if self[0].stats.is_fft:
                st2 = self.copy()
                for tr in st2:
                    tr.data = np.imag(tr.data)
                super(Stream, st2).write(filename + '_IM', format_, **kwargs)
            super(Stream, self).write(filename, format_, **kwargs)
        if Qheader and format_ == 'Q':
            for tr in self:
                if 'event' in tr.stats.keys():
                    for key_sh, key_opy in SH_OPY_EVENT_IDX.iteritems():
                        if key_opy in tr.stats.event.keys():
                            del tr.stats['sh'][key_sh]
                for key_sh, key_opy in SH_OPY_IDX.iteritems():
                    if key_opy in tr.stats.keys():
                        del tr.stats['sh'][key_sh]
                if 'filter' in tr.stats.keys() and len(tr.stats.filter) > 78:
                    del tr.stats.sh.COMMENT
                if len(tr.stats['sh']) == 0:
                    del tr.stats['sh']
        if format_ == 'Q':
            towrite = ''
            for tr in self:
                towrite += _reprSortedStats(tr.stats) + '\n'
                # towrite += repr(tr.stats) + '\n'
            with open(filename + '.HAT', 'w') as file_:
                file_.write(towrite)
        log.info('Write stream %s with %s traces to %s' % (self.hash, len(self), filename))

    def writex(self, filename, format_, years=True, stations=True, **kwargs):
        ye = list(set(self.getHI('event.datetime.year')))
        st = self.getStationList()
        if stations and years:
            for s in st:
                for y in ye:
                    self.select(station=s, expr='st.event.datetime.year==%s' % y).write(
                        filename % (s, y), format_, **kwargs)
        elif stations:
            for s in st:
                self.select(station=s).write(filename % s, format_, **kwargs)
        elif not stations and years:
            for y in ye:
                self.select(expr='st.event.datetime.year==%s' % y).write(
                    filename % y, format_, **kwargs)
        else:
            self.write(filename, format_, **kwargs)

    def writey(self, filename, format_, **kwargs):
        self.writex(filename, format_, stations=False, **kwargs)

    def __init__(self, traces=None):
        """
        Extend Stream.__init__. Traces are now converted to Trace objects.
        """
        if traces is not None and len(traces) > 0 and not isinstance(traces[0], Trace):
            traces = [Trace(trace=tr) for tr in traces]
        super(Stream, self).__init__(traces=traces)
        if len(self) > 0 and 'is_fft' not in self[0].stats:
            self.setHI('is_fft', False)

    def copy(self, logit=False):
        """
        Return deepcopy of stream.
        """
        newstream = super(Stream, self).copy()
        if logit:
            log.info('Creating new stream %s by copying stream %s' % (newstream.hash, self.hash))
        return newstream

    def print_(self, mod=0):
        """
        Special printing method for header information.

        :param mod: 0-2
        """
        return_string = str(len(self.traces)) + ' Trace(s) in Stream:\n'
        return return_string + "\n".join([tr.print_(mod) for tr in self])

    def getHI(self, hf, operation=None):
        """
        Return list of header information of given header field.

        :param hf: name of header field, e.g. 'azi', 'event.depth'
        :param operation: string understood by sito.util.calculate or a
            callable applied to the collected list before returning
        :return: list of header information
        """
        if hf == 'id':
            hi = [tr.id for tr in self]
        else:
            hi = [eval('tr.stats.' + hf) for tr in self]  # quick and dirty
            # quick but wrong for hf containing '.':
            # hi = [getattr(tr.stats, hf) for tr in self]
            # alternatives: operator.attrgetter,
            # http://code.activestate.com/recipes/577346-getattr-with-arbitrary-depth/
        if operation is not None and isinstance(operation, basestring):
            hi = calculate(hi, operation)
        elif operation is not None:
            hi = operation(hi)
        return hi
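
    # Illustration (assumes a populated stream `st`; not part of the original
    # module): getHI accepts dotted header names, and `operation` may be a
    # string for sito.util.calculate or any callable applied to the list.
    #
    # >>> depths = st.getHI('event.depth')       # one value per trace
    # >>> mean_npts = st.getHI('npts', np.mean)  # callable reduces the list
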
    def calculate(self, operation, start=None, end=None, relative='starttime',
                  return_trace=True, add_to_stream=False):
        log.info('Calculate %s of stream %s' % (operation, self.hash))
        data = self.getDataWindow(start=start, end=end, relative=relative)
        ret = calculate(data, operation)
        if return_trace:
            ret = Trace(data=ret, header=self[0].stats.copy())
            if add_to_stream:
                self.insert(0, ret)
        return ret

    def getArgMax(self, ret='ind', spline=False, spline_enhance=100, func=None):
        if ret == 'ind' and not spline:
            dtype = int
        elif ret in ('ind', 'time'):
            dtype = float
        elif ret == 'utc':
            dtype = UTCDateTime
        argmax = np.empty(len(self), dtype=dtype)
        dmax = np.empty(len(self))
        for i, tr in enumerate(self):
            argmax[i], dmax[i] = tr.getArgMax(ret=ret, spline=spline,
                                              spline_enhance=spline_enhance,
                                              func=func)
        return argmax, dmax

    def setHI(self, hf, hi):
        """
        Set header information in given header field.

        :param hf: name of header field, e.g. 'azi', 'event.depth'
        :param hi: list of entries or a single entry used for all traces
        """
        if not isinstance(hi, list) and not isinstance(hi, np.ndarray):
            hi = [hi] * len(self)
        if '.' in hf:
            parts = hf.split('.')
            for i, tr in enumerate(self):
                tr.stats[parts[0]][parts[1]] = hi[i]
        else:
            for i, tr in enumerate(self):
                tr.stats[hf] = hi[i]
    def fft(self, *args, **kwargs):
        """
        FFT every trace.
        """
        log.info('FFT stream %s: %s' % (self.hash, util.parameters()))
        for tr in self:
            tr.fft(*args, **kwargs)

    def ifft(self, *args, **kwargs):
        """
        IFFT every trace.
        """
        log.info('IFFT stream %s: %s' % (self.hash, util.parameters()))
        for tr in self:
            tr.ifft(*args, **kwargs)

    def demean(self):
        """
        Subtract the mean from every trace.
        """
        log.info('Demean stream %s' % self.hash)
        for tr in self:
            tr.demean()

    def detrend(self):
        """
        Detrend every trace.
        """
        log.info('Detrend stream %s' % self.hash)
        for tr in self:
            tr.detrend()

    def integrate(self):
        """
        Integrate every trace.
        """
        log.info('Integrate stream %s' % self.hash)
        for tr in self:
            tr.integrate()

    def filter2(self, *args, **kwargs):
        """
        Wrapper for Trace.filter2.
        """
        log.info('Filter stream %s: %s' % (self.hash, util.parameters()))
        for tr in self:
            tr.filter2(*args, **kwargs)

    def gauss(self, *args, **kwargs):
        """
        Gauss filter every trace.
        """
        log.info('Gauss-Filter stream %s: %s' % (self.hash, util.parameters()))
        for tr in self:
            tr.gauss(*args, **kwargs)

    def downsample2(self, new_sampling_rate):
        """
        Wrapper for Trace.downsample2.
        """
        log.info('Downsample stream %s: %s' % (self.hash, new_sampling_rate))
        for tr in self:
            tr.downsample2(new_sampling_rate)

    def taper2(self, *args, **kwargs):
        for tr in self:
            tr.taper2(*args, **kwargs)

    @add_doc(timeNorm)
    def timeNorm(self, *args, **kwargs):
        """
        Time normalization for preparation of cross correlation.

        Doc of sito.xcorr.timeNorm:
        """
        log.info('Time Normalization of stream %s: %s' % (self.hash, util.parameters()))
        for tr in self.traces[:]:
            try:
                tr.timeNorm(*args, **kwargs)
            except Exception as err:
                log.debug('Remove trace at %s because:\n%s' %
                          ((tr.stats.starttime + 0.1).date, str(err)))
                self.remove(tr)

    @add_doc(spectralWhitening)
    def spectralWhitening(self, *args, **kwargs):
        """
        Spectral whitening for preparation of cross correlation.

        Doc of sito.xcorr.spectralWhitening:
        """
        log.info('Spectral Whitening of stream %s: %s' % (self.hash, util.parameters()))
        for tr in self:
            tr.spectralWhitening(*args, **kwargs)

    def trim2(self, start=None, end=None, relative='starttime', pad=False,
              nearest_sample=True):
        """
        Trim time window around ponset.

        :param start, end: UTCDateTime or seconds relative to relative
        :param relative: name of a header field with UTCDateTime, seconds
            after starttime, UTCDateTime or list of UTCDateTime

        See util.imaging.getTimeIntervall and obspy.core.Stream.trim.
        """
        log.info('Trim stream %s: %s' % (self.hash, util.parameters()))
        start_utc, end_utc = util.imaging.getTimeIntervall(self, start, end, relative, ret_rel='utc')
        if isinstance(start, UTCDateTime) or type(start) == list:
            start = 'utc'
        if isinstance(end, UTCDateTime) or type(end) == list:
            end = 'utc'
        for i, tr in enumerate(self):
            tr.trim(start_utc[i], end_utc[i], pad, nearest_sample)
            tr.stats.filter += 'TR%s,%s' % (start, end)
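
    # Sketch (illustrative values): cut a window from 10 s before to 60 s
    # after the P onset stored in each trace's header.
    #
    # >>> st.trim2(-10, 60, relative='ponset')
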
    def slice2(self, start=None, end=None, relative='starttime', keep_empty_traces=False):
        """
        Return new Stream object cut to the given start and end time.

        Does not copy the data but only passes a reference. By default any
        empty traces are discarded; set keep_empty_traces to True to change
        this behaviour.
        """
        log.info('Select stream %s: %s' % (self.hash, util.parameters()))
        start_utc, end_utc = util.imaging.getTimeIntervall(self, start, end, relative, ret_rel='utc')
        if isinstance(start, UTCDateTime) or type(start) == list:
            start = 'utc'
        if isinstance(end, UTCDateTime) or type(end) == list:
            end = 'utc'
        traces = []
        for i, trace in enumerate(self):
            sliced_trace = trace.slice(start_utc[i], end_utc[i])
            if keep_empty_traces is False and not sliced_trace.stats.npts:
                continue
            # trace.stats.filter += 'SEL%s,%s' % (start, end)
            traces.append(sliced_trace)
        return self.__class__(traces=traces)

    def getDataWindow(self, start, end, relative):
        """
        Return array with data in time window (start, end).

        :param start, end: UTCDateTime or list of UTCDateTime or floats (seconds)
        :return: numpy.array of shape (N_stream, N_newdata)

        See util.imaging.getDataWindow.
        """
        return util.imaging.getDataWindow(self, start, end, relative)

    def sort(self, keys=('network', 'station', 'location', 'channel',
                         'starttime', 'endtime'), reverse=False, logit=True):
        """
        Override obspy.core.Stream.sort.

        Every sortable header field can be in keys. The special key
        'component' sorts components in the order Z, N, E (or L, Q, T).
        """
        # Check the list and all items.
        if isinstance(keys, basestring):
            keys = [keys]
        if not isinstance(keys, (tuple, list)):
            raise TypeError('keys has to be tuple/list of strings or string')
        # Loop over all keys in reversed order.
        for _i in keys[::-1]:
            if _i == 'component':
                if self[0].stats.channel[-1] in 'ZNE':
                    self.traces.sort(cmp=lambda x, y: cmp(
                        'ZNE'.index(x.stats.channel[-1]),
                        'ZNE'.index(y.stats.channel[-1])), reverse=reverse)
                elif self[0].stats.channel[-1] in 'LQT':
                    self.traces.sort(cmp=lambda x, y: cmp(
                        'LQT'.index(x.stats.channel[-1]),
                        'LQT'.index(y.stats.channel[-1])), reverse=reverse)
            elif '.' in _i:
                parts = _i.split('.')
                self.traces.sort(key=lambda x: x.stats[parts[0]][parts[1]], reverse=reverse)
            else:
                self.traces.sort(key=lambda x: x.stats[_i], reverse=reverse)
        if logit:
            log.info('Sort stream %s after %s' % (self.hash, util.parameters(only=['keys', 'reverse'])))
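
    # Sketch: bring traces into the Z, N, E triplet order per event and
    # station that check(), rotate() and the receiver function methods expect.
    #
    # >>> st.sort(['event.id', 'station', 'component'])
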
    def select(self, expr=None, around=None, indegree=False, time=None, mark=None,
               event_id=None, logit=False, autocorr=False, xcorr=False, **kwargs):
        """
        Extend Stream.select.

        You can additionally choose a list of event ids or a condition expr
        wherein 'st.' stands for 'trace.stats.'.
        """
        # make given component letter uppercase (if e.g. "z" is given)
        ms = super(Stream, self).select(**kwargs).traces
        if event_id and not isinstance(event_id, list):
            event_id = [event_id]
        if time:
            if isinstance(time, UTCDateTime):
                expr = '%r<=st.starttime' % time
            elif time[1] is None:
                expr = '%r<=st.starttime' % time[0]
            elif time[0] is None:
                expr = 'st.starttime<%r' % time[1]
            else:
                expr = '%r<=st.starttime<%r' % time
        elif autocorr:
            expr = "len(set(st.station.split('-')))==1"
        elif xcorr:
            expr = "len(set(st.station.split('-')))==2"
        if expr:
            expr = expr.replace('st.', 'trace.stats.')
        traces = []
        for trace in ms:
            # skip trace if any given criterion is not matched
            if event_id and trace.stats.event.id not in event_id:
                continue
            if mark is not None and mark != bool(trace.stats.mark):
                continue
            if expr and not eval(expr):
                continue
            if around:
                lat, lon, maxdist = around
                if indegree:
                    dist = util.gps2DistDegree(lat, lon,
                                               trace.stats.event.latitude,
                                               trace.stats.event.longitude)
                else:
                    dist = gps2DistAzimuth(lat, lon,
                                           trace.stats.event.latitude,
                                           trace.stats.event.longitude)[0] / 1000.
                if dist > maxdist:
                    continue
            traces.append(trace)
        newstream = self.__class__(traces=traces)
        if logit:
            log.info('Creating new stream %s by selecting traces from stream %s: %s'
                     % (newstream.hash, self.hash, util.parameters()))
        return newstream
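
    # Sketch (hypothetical station name and region): 'st.' in expr is a
    # shorthand for 'trace.stats.'.
    #
    # >>> sub = st.select(station='PB01', expr='st.event.magnitude>=6.0')
    # >>> near = st.select(around=(-21.0, -69.5, 300))  # lat, lon, max distance in km
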
    def getStationList(self):
        """
        Return list of stations.
        """
        station_list = []
        for trace in self:
            station = trace.stats['station']
            if station not in station_list:
                station_list.append(station)
        return station_list

    def getStations(self, file_):
        """
        Return Stations instance for the stations in this stream.
        """
        st_list = self.getStationList()
        if isinstance(file_, basestring):
            import stations
            st = stations.Stations.read(file_)
        else:
            st = file_.copy()
        st.pick(' '.join(st_list))
        return st

    def getEvents(self):
        """
        Return Events instance.
        """
        import events
        return events.Events.readFromStream(self)
    def writeGMTfile(self, filename, start=-20, end=20, reductionFactor=10):
        """
        Write a GMT readable file with the data.
        """
        str_list = []
        N_stream = len(self)
        for i, trace in enumerate(self):
            t1 = trace.stats.starttime - trace.stats.ponset  # sh['P-ONSET']
            t2 = trace.stats.endtime - trace.stats.ponset  # sh['P-ONSET']
            N_data = trace.stats.npts
            t = np.linspace(t1, t2, N_data)
            str_list.append('> trace %d\n' % (i + 1))
            for j, datapoint in enumerate(trace.data):
                if j % reductionFactor == 0 and start < t[j] < end:
                    str_list.append(' %f %f %f\n' % (t[j], N_stream - i, datapoint))
        with open(filename, 'w') as file_:
            file_.write(''.join(str_list))
    def check(self, check_ponset=True, check_duplettes=True, delete_incompatible_traces=True):
        """
        Check data for compatibility with rotation and receiver function methods.

        Data must be arranged in tuples of 3 with components ZNE. Some of the
        header entries of traces belonging to one station and event must be
        equal. Use for example sort(['event.id', 'station', 'component'])
        before.
        """
        retcode = True
        i = 0
        j = 0
        if check_duplettes:
            while i < len(self) - 1:
                st1 = self[i].stats
                st2 = self[i + 1].stats
                if st1.event.id == st2.event.id and st1.station == st2.station and st1.channel == st2.channel:
                    if st1.npts >= st2.npts:
                        index = i + 1
                    else:
                        index = i
                    if delete_incompatible_traces:
                        log.debug('Delete incompatible trace %s (duplicate)' % (self[index]))
                        self.pop(index)
                        j += 1
                    else:
                        raise ValueError('Stream %s incompatible at least at index %d' % (self.hash, index))
                i += 1
        i = 0
        while i < len(self) - 2:
            st1 = self[i].stats
            st2 = self[i + 1].stats
            st3 = self[i + 2].stats
            if not (
                    (st1.channel.endswith('Z') and st2.channel.endswith('N') and st3.channel.endswith('E') or
                     st1.channel.endswith('L') and st2.channel.endswith('Q') and st3.channel.endswith('T')) and
                    st1.station == st2.station == st3.station and
                    st1.event.id == st2.event.id == st3.event.id and
                    st1.npts == st2.npts == st3.npts and
                    st1.sampling_rate == st2.sampling_rate == st3.sampling_rate and
                    not np.any(np.isnan(self[i].data)) and
                    not np.any(np.isnan(self[i + 1].data)) and
                    not np.any(np.isnan(self[i + 2].data))) \
                    or (check_ponset and not st1.starttime + 10 < st1.ponset < st1.endtime - 30):
                retcode = False
                if delete_incompatible_traces:
                    log.debug('Delete incompatible trace %s' % (self[i]))
                    self.pop(i)
                    if i == len(self) - 3:
                        log.debug('Delete incompatible trace %s' % (self[i]))
                        self.pop(i)
                        log.debug('Delete incompatible trace %s' % (self[i]))
                        self.pop(i)
                else:
                    raise ValueError('Stream %s incompatible at least at index %d' % (self.hash, i))
                j += 1
            else:
                i += 3
        if len(self) % 3 != 0:
            log.warning('Stream must have length 3N for rotation or rf.')
            retcode = False
            if delete_incompatible_traces:
                i = len(self) - 1
                log.debug('Delete incompatible trace %s' % (self[i]))
                self.pop(i)
                j += 1
                if len(self) % 3 != 0:
                    i = len(self) - 1
                    log.debug('Delete incompatible trace %s' % (self[i]))
                    self.pop(i)
                    j += 1
        if not retcode and delete_incompatible_traces:
            log.warning('Delete %s incompatible traces in stream %s, remaining %s traces'
                        % (j, self.hash, len(self)))
        return retcode
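
    # Sketch: typical preparation before rotation or receiver function
    # calculation; incompatible traces are removed in place.
    #
    # >>> st.sort(['event.id', 'station', 'component'])
    # >>> st.check(check_ponset=True)
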
    def polar(self, start, end, relative='ponset', retlin=False):
        """
        Calculate polarization.

        Save azimuth (angle between north and the largest eigenvalue
        (L-component) in the earth surface plane) and inclination (angle
        between vertical and the largest eigenvalue) in the header fields
        lazi and linci.
        """
        data = self.getDataWindow(start, end, relative)
        if retlin:
            lin_list = []
        i = 0
        j = 0
        while j < len(self) - 2:
            try:
                inci, azi, lin = rf.polar(data[i + 2, :],
                                          data[i + 1, :], data[i, :])  # E, N, Z = x, y, z
                if retlin:
                    lin_list.append(lin)
                # wrong documentation in seis.polar.polarization_py ?
                # angle between E (x-axis) clockwise to eigenvalue
                # -> angle between N clockwise to eigenvalue
                self[j].stats.lazi = azi
                self[j + 1].stats.lazi = azi
                self[j + 2].stats.lazi = azi
                # inclination: angle between Z and new L-component
                self[j].stats.linci = inci
                self[j + 1].stats.linci = inci
                self[j + 2].stats.linci = inci
                i += 3
                j += 3
            except ValueError:
                log.warning('Stream[%d:%d]: error while calculating polarization '
                            '-> remove traces, remaining %s traces' % (i, i + 3, len(self)))
                log.warning('Stream[%d:%d]: %s' % (i, i + 3, self[j:j + 3]))
                self.pop(j + 2)
                self.pop(j + 1)
                self.pop(j)
                i += 3
        if retlin:
            return lin_list

    def rotateZNE2LQT(self, start=-2, end=10, relative='ponset', usetheo=False):
        """
        Rotate from ZNE to LQT. Call check before.

        See obspy.signal.rotate_ZNE_LQT.
        """
        log.info('Rotate stream %s: %s' % (self.hash, util.parameters()))
        self.polar(start, end, relative)
        if usetheo:
            azi = self.select(component='Z').getHI('azi')
            inci = self.select(component='Z').getHI('inci')
        else:
            azi = self.select(component='Z').getHI('lazi')
            inci = self.select(component='Z').getHI('linci')
        for i in range(len(self) // 3):
            Z, N, E = self[3 * i].data, self[3 * i + 1].data, self[3 * i + 2].data
            L, Q, T = obspy.signal.rotate_ZNE_LQT(Z, N, E, azi[i], inci[i])
            self[3 * i].data, self[3 * i + 1].data, self[3 * i + 2].data = L, Q, T
        for tr in self:
            tr.stats.filter += 'Rot'
            tr.stats.channel = tr.stats.channel[:-1] + \
                'LQT'['ZNE'.index(tr.stats.channel[-1])]
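
    # Sketch: rotate checked ZNE triplets into the LQT ray system, using the
    # polarization measured in a window around the P onset.
    #
    # >>> st.rotateZNE2LQT(start=-2, end=10, relative='ponset')
    # >>> st.brotate()  # back to ZNE when done
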
    def rotateLQT2ZNE(self, usetheo=False):
        """
        Back-rotate from LQT to ZNE.

        See obspy.signal.rotate_LQT_ZNE.
        """
        log.info('Back rotate stream %s' % (self.hash))
        azi = self.select(component='L').getHI('azi' if usetheo else 'lazi')
        inci = self.select(component='L').getHI('inci' if usetheo else 'linci')
        for i in range(len(self) // 3):
            L, Q, T = self[3 * i].data, self[3 * i + 1].data, self[3 * i + 2].data
            # Z, N, E = util.rotateLQT2ZNE(L, Q, T, azi[i], inci[i])
            Z, N, E = obspy.signal.rotate_LQT_ZNE(L, Q, T, azi[i], inci[i])
            self[3 * i].data, self[3 * i + 1].data, self[3 * i + 2].data = Z, N, E
        for tr in self:
            tr.stats.filter += 'Tor'
            tr.stats.channel = tr.stats.channel[:-1] + \
                'ZNE'['LQT'.index(tr.stats.channel[-1])]

    @add_doc(rotateZNE2LQT)
    def rotate(self, *args, **kwargs):
        """
        See Stream.rotateZNE2LQT:
        """
        self.rotateZNE2LQT(*args, **kwargs)

    @add_doc(rotateLQT2ZNE)
    def brotate(self, *args, **kwargs):
        """
        See Stream.rotateLQT2ZNE:
        """
        self.rotateLQT2ZNE(*args, **kwargs)

    def rotateNE2RT(self, start=-2, end=10, relative='ponset'):
        """
        Rotate from NE to RT. Call check before.

        See obspy.signal.rotate_NE_RT.
        """
        log.info('Rotate stream from NE to RT %s: %s' % (self.hash, util.parameters()))
        azi = self.select(component='Z').getHI('azi')
        self.polar(start, end, relative)
        for i in range(len(self) // 3):
            N, E = self[3 * i + 1].data, self[3 * i + 2].data
            R, T = obspy.signal.rotate_NE_RT(N, E, azi[i])
            # we want to have a right handed system similar to LQT
            # R points away from source
            self[3 * i + 1].data, self[3 * i + 2].data = -R, T
        for tr in self:
            tr.stats.filter += 'RotRT'
            tr.stats.channel = tr.stats.channel[:-1] + \
                'ZRT'['ZNE'.index(tr.stats.channel[-1])]
    def receiverf(self, water=0.01, gauss=2, tshift=10, pad=0, window=None,
                  start=-10, end=30, where='ponset', lenslope=5,
                  return_real=True, normalize=True):
        """
        Apply deconvolution in the frequency domain.

        Treat traces with index 3*i as source and traces with index 3*i+1 and
        3*i+2 as response.

        :param water: waterlevel to stabilize the deconvolution (relative to data maximum)
        :param gauss: Gauss parameter of averaging function (std of LP-filter)
        :param tshift: shift the resulting function by that amount
        :param pad: multiply number of samples used for fft by 2**pad
        :param window, start, end, where, lenslope: apply a taper window of
            this type (with lenslope seconds of smoothing) between start and
            end around where to the source function before deconvolution
        :param return_real: just use the real part

        See rf.deconv.
        """
        log.info('Deconvolve stream %s: %s' % (self.hash, util.parameters()))
        for i in range(len(self) // 3):
            samp = self[3 * i].stats.sampling_rate
            if window is not None:
                src = self[3 * i:3 * i + 1].copy()
                src.window(start, end, where, window, lenslope)
                src = src[0].data
            else:
                src = self[3 * i].data
                start = 0
                end = 0
            rsp = [self[3 * i].data, self[3 * i + 1].data, self[3 * i + 2].data]
            rf_resp = rf.deconvf(rsp, src, samp, water, gauss, tshift, pad, normalize=normalize)
            # multiply Q and T component by -1
            if return_real:
                self[3 * i].data, self[3 * i + 1].data, self[3 * i + 2].data = \
                    rf_resp[0].real, -rf_resp[1].real, -rf_resp[2].real
            else:
                self[3 * i].data, self[3 * i + 1].data, self[3 * i + 2].data = \
                    rf_resp[0], -rf_resp[1], -rf_resp[2]
        for tr in self:
            tr.stats.filter += 'RF%s,%s,%swin%s,%s' % (water, gauss, tshift, start, end)
            tr.stats.ponset = tr.stats.starttime + tshift
            tr.stats.tshift = tshift
        if not normalize:
            maxi = np.mean(self[::3].getFunc(np.max))
            for tr in self:
                tr.data /= maxi
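
    # Sketch (illustrative parameter values): frequency-domain receiver
    # functions from LQT-rotated triplets; the trace at index 3*i (L) acts
    # as the source.
    #
    # >>> st.rotateZNE2LQT()
    # >>> st.receiverf(water=0.005, gauss=5, tshift=20)
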
    def receivert(self, winsrc=(-20, 80, 5), winrsp=(-20, 80), winrf=(-20, 80),
                  where='ponset', spiking=1, normalize=True):
        """
        Apply deconvolution in the time domain.
        """
        # if not winsrc[1] - winsrc[0] >= winrsp[1] - winrsp[0] >= winrf[1] - winrf[0]:
        #     raise ValueError('Source window has to be bigger than response window '
        #                      'has to be bigger than RF window!')
        log.info('Deconvolve stream %s: %s' % (self.hash, util.parameters()))
        for i in range(len(self) // 3):
            samp = self[3 * i].stats.sampling_rate
            onset = self[3 * i].stats.get(where)
            src = self[3 * i:3 * i + 1].slice(onset + winsrc[0], onset + winsrc[1]).copy()
            src = src[0].data
            # src *= util.main.getWindow('tukey', len(src), 2 * winsrc[2] / (winsrc[1] - winsrc[0]))
            src *= cosTaper(len(src), 2 * winsrc[2] / (winsrc[1] - winsrc[0]))
            rsp = self[3 * i:3 * i + 3].slice(onset + winrsp[0], onset + winrsp[1])
            rsp = [tr.data for tr in rsp]
            time_rf = winrf[1] - winrf[0]
            # shift == 0 for winrsp[1]-winrsp[0] == winsrc[1]-winsrc[0]+time_rf
            #                                       + 2*(-winrsp[0]+winsrc[0]+winrf[0])
            shift = int(samp * ((winrsp[1] - winrsp[0] - winsrc[1] + winsrc[0] - time_rf) / 2 +
                                winrsp[0] - winsrc[0] - winrf[0]))
            rf_resp = rf.deconvt(rsp, src, spiking, shift, length=int(time_rf * samp),
                                 normalize=normalize)
            # multiply Q and T component by -1
            self[3 * i].data, self[3 * i + 1].data, self[3 * i + 2].data = \
                rf_resp[0], -rf_resp[1], -rf_resp[2]
            for tr in self[3 * i:3 * i + 3]:
                tr.stats.filter += 'RFT%swins%s,%s,%s' % (spiking, winsrc, winrsp, winrf)
                tr.stats.ponset = tr.stats.starttime - winrf[0]
                tr.stats.tshift = -winrf[0]
                tr.stats.npts = len(tr.data)
        if not normalize:
            maxi = np.mean(self[::3].getFunc(np.max))
            for tr in self:
                tr.data /= maxi
    def receiverSH(self, start, end, spiking=1, cut1=-20, cut2=100):
        """
        Deconvolution using Seismic Handler.

        At the moment not working because writing of the full header
        (lazi, linci) is not supported anymore.
        """
        log.info('Deconvolve stream %s using SH: %s' % (self.hash, util.parameters()))
        import tempfile
        tempdir = tempfile.gettempdir()
        self.write(tempdir + '/FORSH', 'Q', Qheader=True)
        command = "cd " + tempdir + """ &&
rm -f ./FORPY.Q?? &&
rm -f ./FORPY.HAT &&
mv ./FORSH.HAT ./FORPY.HAT &&
shc<<END
read FORSH all
al all p-onset
dec3 %s;%s;%s
cut all %s %s
write FORPY all
quit y
END""" % (start, end, spiking, cut1, cut2)
        log.debug(command)
        log.info(util.runBash(command))
        stream = read(tempdir + '/FORPY.QHD', ignore_starttime=True)
        for i, tr in enumerate(stream):
            del tr.stats.sh
            tr.stats.filter += 'RFSH%s,%swin%s,%s' % (-cut1, spiking, start, end)
            tr.stats.ponset = tr.stats.starttime - cut1
            tr.stats.tshift = -cut1
            self[i] = tr
    def moveout(self, model='iasp91', phase='Ps', p0=6.4):
        """
        In-place moveout correction.
        """
        log.info('Moveout correction for stream %s: %s' % (self.hash, util.parameters()))
        if model == 'iasp91':
            model = util.Iasp91()
        for tr in self:
            tr.moveout(model, phase, p0)

    def zmigr(self, model='iasp91', phase='Ps', zmax=750, dz=0.5):
        """
        Z-migration. Not working!
        """
        log.info('Z-Migration for stream %s: %s' % (self.hash, util.parameters()))
        if model == 'iasp91':
            model = util.Iasp91()
        for tr in self:
            tr.zmigr(model, phase, zmax, dz)

    def writeStationPosition(self, file_):
        """
        Write the station coordinates into the stats header fields slat, slon, selev.
        """
        stations = self.getStations(file_)
        for tr in self:
            tr.stats['slat'] = stations[tr.stats.station].latitude
            tr.stats['slon'] = stations[tr.stats.station].longitude
            tr.stats['selev'] = stations[tr.stats.station].elevation

    def pspier(self, depth, filename=None, other_header=None):
        """
        Write piercing point information into header fields rpier, plat, plon.

        :param depth: depth of piercing point
        :param filename: file with station information, if None the header
            entries slat and slon must already be set
        """
        log.info('Pspier stream %s: %s' % (self.hash, util.parameters(exclude=['filename'])))
        if filename:
            self.writeStationPosition(filename)
        # if component:
        #     stream = self.select(component=component)
        # else:
        #     stream = self
        slat = np.array(self.getHI('slat'))
        slon = np.array(self.getHI('slon'))
        slowness = np.array(self.getHI('slowness'))
        azi = np.array(self.getHI('azi'))
        rpier, plat, plon = util.pspier(depth, slat, slon, slowness, azi)
        if other_header is None:
            self.setHI('rpier', rpier)
            self.setHI('plat', plat)
            self.setHI('plon', plon)
        else:
            self.setHI('rpier' + other_header, rpier)
            self.setHI('plat' + other_header, plat)
            self.setHI('plon' + other_header, plon)
        for tr in self:
            tr.stats.filter += 'Pspier%d' % depth
    def simpleStack(self, overwrite=True, insert=False, component='all'):
        """
        Calculate sum of traces.

        :param overwrite: if true, overwrite some stats entries with mean values
        :param insert: if true, insert the sum at position 0 in the stream
        :return: summation trace
        """
        stream = self._getStreamFromComponent(component)
        tr = stream[0].copy()
        tr.data = np.mean([i.data for i in stream], axis=0)
        tr.stats.filter += 'Stack%d' % len(stream)
        tr.stats['count'] = len(stream)
        if overwrite:
            for header in ('azi dist lazi ldist event.depth event.latitude '
                           'event.longitude event.magnitude').split():
                if (header in stream[0].stats or
                        (header.startswith('event.') and
                         'event' in stream[0].stats and
                         header[6:] in stream[0].stats.event)):
                    value = np.mean(self.getHI(header))
                    if '.' in header:
                        part1, part2 = header.split('.')
                        tr.stats[part1][part2] = value
                    else:
                        tr.stats[header] = value
        if insert:
            self.insert(0, tr)
        return tr

    def phaseStack(self, order=1., component='all'):
        """
        Return stack of instantaneous phase.
        """
        stream = self._getStreamFromComponent(component)
        N = stream[0].stats.npts
        phases = np.zeros((len(stream), N), complex)
        stack_trace = stream[0].copy()
        for i, trace in enumerate(stream):
            analytic_signal, envelope = obspy.signal.cpxtrace.envelope(trace.data)
            phases[i, :] = (analytic_signal / envelope)[:N]
        stack_trace.data = np.abs(np.sum(phases ** order, axis=0)) / len(stream)
        stack_trace.stats.filter += 'PST'
        return stack_trace

    def PWS(self, overwrite=False, order=1, component='Q'):
        """
        Return phase-weighted stack.
        """
        tr1 = self.phaseStack(order=order, component=component)
        tr2 = self.simpleStack(overwrite=overwrite, component=component)
        tr2.data *= tr1.data
        tr2.stats.filter += 'PWS'
        return tr2
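
    # Sketch: compare a linear stack of the Q components with the
    # phase-weighted stack (order=1 weights by phase coherence).
    #
    # >>> lin = st.simpleStack(component='Q')
    # >>> pws = st.PWS(order=1, component='Q')
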
    def _getStreamFromComponent(self, component):
        if component in 'QLTZNE':
            return self.select(component=component)
        else:
            return self

    def slantStackOneDir(self, slope, component='Q', p0=7.0, order=1):
        stream = self._getStreamFromComponent(component)
        if slope != 0:
            stream = stream.copy()
            for tr in stream:
                shift = -slope * (tr.stats.slowness ** 2 - p0 ** 2) * tr.stats.sampling_rate
                shiftN = int(round(shift))
                data1 = np.roll(tr.data, shiftN)
                data2 = np.roll(data1, int(shiftN <= shift))
                tr.data = data1 + (data2 - data1) * abs(shift - shiftN)
        ret_tr = stream.PWS(order=order)
        ret_tr.stats.filter += 'slant'
        ret_tr.stats.slope = slope
        ret_tr.stats.slowness = p0
        return ret_tr

    def slantStack(self, slopes=np.linspace(-0.1, 0.1, 21), component='Q', p0=7.0, order=1):
        """
        Return slant stack.
        """
        stack = self.__class__()
        for slope in slopes:
            tr = self.slantStackOneDir(slope, component, p0, order)
            stack.append(tr)
        return stack
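
    # Sketch (default slope grid): one stacked trace per slope value, each
    # tagged with stats.slope and stats.slowness for later plotting.
    #
    # >>> panel = st.slantStack(slopes=np.linspace(-0.1, 0.1, 21), component='Q')
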
    def getMaxima(self):
        """
        Return np.array of maxima positions in seconds for every trace.
        """
        return np.array([tr.data.argmax() * 1. / tr.stats.sampling_rate for tr in self])

    def getFunc(self, func):
        """
        Return np.array of func(data) for every trace.
        """
        return np.array([func(tr.data) for tr in self])

    def getArg(self, func):
        """
        Return np.array of func(data), converted from samples to seconds, for every trace.
        """
        return np.array([func(tr.data) * 1. / tr.stats.sampling_rate for tr in self])

    def correlate(self, tr2, shift_sec, start=None, end=None,
                  relative='starttime', demean=False, normalize=True):
        """
        Correlate all traces with given trace using the given maximum shift.
        """
        from sito.xcorr import xcorrf
        data = self.getDataWindow(start, end, relative)
        data2 = Stream([tr2]).getDataWindow(start, end, relative).flatten()
        if shift_sec == 0:
            cors = []
        else:
            cors = Stream()
        for i, tr in enumerate(self):
            sr = tr.stats.sampling_rate
            new_data = xcorrf(data2, data[i, :].flatten(), int(shift_sec * sr),
                              demean=demean, normalize=normalize)
            if shift_sec == 0:
                cors.append(new_data[0])
            else:
                tr_cor = Trace(data=new_data[::-1], header=tr.stats.copy())
                tr_cor.stats.npts = len(new_data)
                tr_cor.stats.sampling_rate = (tr_cor.stats.npts - 1.) / 2 / shift_sec
                tr_cor.stats.is_fft = False
                cors.append(tr_cor)
        return cors
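
    # Sketch (hypothetical reference trace tr_ref): correlate every trace
    # with tr_ref allowing +-50 s of lag; returns a stream of correlation
    # traces (or a list of zero-lag values if shift_sec == 0).
    #
    # >>> cors = st.correlate(tr_ref, 50, start=0, end=3600)
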
    def correlate_numpy(self, tr2, start, end, start2, end2, relative='starttime',
                        normalize=True, mode='valid'):
        data1 = self.getDataWindow(start, end, relative)
        data2 = Stream([tr2]).getDataWindow(start2, end2, relative).flatten()
        data1[0, :] = Stream([tr2]).getDataWindow(start, end, relative).flatten()
        cors = Stream()
        stdev = 1.
        if normalize:
            cumsum = np.cumsum(data2 ** 2)
            ind = data1.shape[1]
            stdev1 = np.max(cumsum[ind:] - cumsum[:-ind]) ** 0.5
        for i, tr in enumerate(self):
            new_data = np.correlate(data2, data1[i, :].flatten(), mode)
            if normalize:
                stdev = (np.sum(data1[i, :] ** 2)) ** 0.5 * stdev1
                new_data /= stdev
            tr_cor = Trace(data=new_data[::-1], header=tr.stats.copy())
            tr_cor.stats.npts = len(new_data)
            tr_cor.stats.is_fft = False
            cors.append(tr_cor)
        return cors

    def norm(self, method='single', value=1.):
        """
        Normalize all traces in stream to value.

        :param method: 'single' normalizes every trace independently,
            'all' normalizes all traces with the same factor.
        """
        log.info('Normalize stream %s: %s' % (self.hash, util.parameters()))
        values = np.zeros(len(self))
        if method == 'all':
            for i, tr in enumerate(self):
                values[i] = np.max(np.abs(tr.data))
            fak = value / max(values)
            for tr in self:
                tr.norm(fak=fak)
        else:
            for tr in self:
                tr.norm(value)
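
    # Sketch: 'single' scales each trace to the target value on its own,
    # 'all' applies one common factor and preserves relative amplitudes.
    #
    # >>> st.norm()              # each trace to max amplitude 1
    # >>> st.norm(method='all')  # common factor for the whole stream
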
    @add_doc(imaging.plot2)
    def plot2(self, *args, **kwargs):
        """
        Plot three traces per axis, assuming three components in a row
        (order Z,N,E,Z,N,E, etc.). Plotting around ponset in time window.

        Doc of imaging.plot2:
        """
        log.info('Plot2 stream %s: %s' % (self.hash, util.parameters()))
        return imaging.plot2(self, *args, **kwargs)

    @add_doc(imaging.plotTrace)
    def plotTrace(self, *args, **kwargs):
        log.info('Plot first trace of stream %s: %s' % (self.hash, util.parameters()))
        return self[0].plotTrace(*args, **kwargs)

    @add_doc(imaging.Plot.__init__)
    def plot_(self, *args, **kwargs):
        """
        Plot all traces in one axis around ponset in time window with filling.

        Doc of imaging.Plot:
        """
        log.info('Plot stream %s: %s' % (self.hash, util.parameters()))
        return imaging.Plot(self, *args, **kwargs)

    # @add_doc(imaging.plotComb)
    def plotComb(self, *args, **kwargs):
        """
        Plot all traces in one axis around ponset in time window with filling.

        Doc of imaging.plotComb:
        """
        log.info('PlotComb stream %s: %s' % (self.hash, util.parameters()))
        return imaging.plotComb(self, *args, **kwargs)

    # @add_doc(imaging.plot3)
    # def plot3(self, *args, **kwargs):
    #     """
    #     Plot all traces in one axis around ponset in time window with filling.
    #
    #     Doc of imaging.plot3:
    #     """
    #     log.info('Plot3 stream %s: %s' % (self.hash, util.parameters()))
    #     return imaging.plot3(self, *args, **kwargs)

    @add_doc(imaging.plotRF)
    def plotRF(self, *args, **kwargs):
        """
        Plot all receiver functions in one axis around ponset in time window with filling.

        Doc of imaging.plotRF:
        """
        log.info('PlotRF stream %s: %s' % (self.hash, util.parameters()))
        return imaging.plotRF(self, *args, **kwargs)

    @add_doc(imaging.plotProfile)
    def plotProfile(self, *args, **kwargs):
        """
        Plot all traces along profile.

        Doc of imaging.plotProfile:
        """
        log.info('PlotProfile stream %s: %s' % (self.hash, util.parameters()))
        return imaging.plotProfile(self, *args, **kwargs)

    @add_doc(imaging.plotXcorr)
    def plotXcorr(self, *args, **kwargs):
        """
        Plot cross correlations.

        Doc of imaging.plotXcorr:
        """
        log.info('PlotXcorr stream %s: %s' % (self.hash, util.parameters()))
        return imaging.plotXcorr(self, *args, **kwargs)

    @add_doc(imaging.plotXcorrVsDist)
    def plotXcorrVsDist(self, *args, **kwargs):
        """
        Plot cross correlations against distance.

        Doc of imaging.plotXcorrVsDist:
        """
        log.info('PlotXcorrVsDist stream %s: %s' % (self.hash, util.parameters()))
        return imaging.plotXcorrVsDist(self, *args, **kwargs)

    @add_doc(imaging.plotFilterbands)
    def plotFilterbands(self, *args, **kwargs):
        """
        Plot traces in different filter bands.

        Doc of imaging.plotFilterbands:
        """
        log.info('PlotFilterbands stream %s: %s' % (self.hash, util.parameters()))
        return imaging.plotFilterbands(self, *args, **kwargs)

    @add_doc(Trace.plotPSD)
    def plotPSD(self, *args, **kwargs):
        """
        Plot PSD of first trace.

        Doc of Trace.plotPSD:
        """
        return self[0].plotPSD(*args, **kwargs)

    def fftfreq(self):
        return self[0].fftfreq()

    def addXcorrSides(self):
        self.setHI('starttime', UTCDateTime('2000-01-01'))
        for tr in self:
            N = tr.stats.npts
            tr.data = tr.data[N // 2:] + tr.data[N // 2 + N % 2 - 1::-1]

    # @add_doc(imaging.plotStack)
    # def plotStack(self, *args, **kwargs):
    #     """
    #     Plot all traces of a stack.
    #
    #     Doc of imaging.plotStack:
    #     """
    #     log.info('PlotStack stream %s: %s' % (self.hash, util.parameters()))
    #     return imaging.plotStack(self, *args, **kwargs)
    def getBinnedStream(self, bins, bins2=None, header='plon', header2='plat'):
        """
        Return a 'binned' stream: one stacked trace per non-empty bin.
        """
        newstream = Stream()
        st = self.copy()
        st.sort(header, logit=False)
        if not bins2:
            for i in range(len(bins) - 1):
                temp = st.select(expr='st.' + header + '>= ' + repr(bins[i]) +
                                 ' and st.' + header + '< ' + repr(bins[i + 1]))
                if len(temp) > 0:
                    tr_sum = temp.simpleStack()
                    if 'time' in header:
                        dif_s = 0.5 * (bins[i + 1] - bins[i]) + \
                            (bins[i] - temp[0].stats[header])
                        for key in ['starttime', 'endtime', 'ponset', 'sonset']:
                            if key in temp[0].stats.keys():
                                tr_sum.stats[key] = temp[0].stats[key] + dif_s
                    else:
                        tr_sum.stats[header] = bins[i] + 0.5 * (bins[i + 1] - bins[i])
                    newstream.append(tr_sum)
        else:
            for i in range(len(bins) - 1):
                for j in range(len(bins2) - 1):
                    temp = st.select(expr='st.' + header + '>= ' + repr(bins[i]) +
                                     ' and st.' + header + '< ' + repr(bins[i + 1]) +
                                     ' and st.' + header2 + '>= ' + repr(bins2[j]) +
                                     ' and st.' + header2 + '< ' + repr(bins2[j + 1]))
                    if len(temp) > 0:
                        tr_sum = temp.simpleStack()
                        tr_sum.stats[header] = bins[i] + 0.5 * (bins[i + 1] - bins[i])
                        tr_sum.stats[header2] = bins2[j] + 0.5 * (bins2[j + 1] - bins2[j])
                        newstream.append(tr_sum)
        if not bins2:
            bins2 = np.array([])
        log.info('Creating new binned stream %s from stream %s: %s'
                 % (newstream.hash, self.hash,
                    util.parameters(add={'bins': 'array%d' % len(bins),
                                         'bins2': 'array%d' % len(bins2)})))
        return newstream
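
    # Sketch (assumes piercing point headers written by pspier; the station
    # file path is hypothetical): stack traces into 1 degree longitude bins.
    #
    # >>> st.pspier(100, filename='/data/stations.txt')
    # >>> binned = st.getBinnedStream(np.arange(-72., -65., 1.), header='plon')
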
    def setEventIDs(self):
        """
        Set event ids in stream.
        """
        log.info('Set event id in stream %s' % (self.hash))
        for i, tr in enumerate(self):
            tr.stats.event['id'] = str(i // 3)

    def signoise(self, winsig, winnoise, relative='ponset'):
        """
        Calculate signal-to-noise ratio by dividing the maxima of the given time windows.

        The ratio is written into the header field 'signoise'.
        """
        log.info('Calculate SigNoise ratio for stream %s: %s' % (self.hash, util.parameters()))
        for tr in self:
            tr.signoise(winsig, winnoise, relative)

    def window(self, start, end, relative='ponset', window='tukey', lenslope=10):
        """
        Window between start and end with args passed to _window function.
        """
        start, end = util.imaging.getTimeIntervall(self, start, end, relative, ret_rel='starttime')
        for i, tr in enumerate(self):
            tr._window(start[i], end[i], window, lenslope)

    def getReasons(self):
        """
        Get a dictionary of reasons why traces in the stream were marked.
        """
        reason_keys = ['SN', 'angle', 'maxL', 'sigQ', 'SN L', 'SN Q', 'broad']
        dict_ = {}
        mark = self.getHI('mark')
        for i, key in enumerate(reason_keys):
            dict_[key] = mark.count(i + 1)
        return dict_

    def setPhase(self, phase):
        """
        Set ponset and dependent information to another phase (e.g. PP).
        """
        for trace in self:
            arrival = util.ttt(trace.stats.dist, trace.stats.event.depth).findPhase(phase)
            if arrival is not None:
                onset = trace.stats.event.datetime + arrival.time
                stats = AttribDict({'inci': arrival.inci, 'ponset': onset,
                                    'slowness': arrival.slow})
                trace.stats.update(stats)
            else:
                trace.stats['mark'] = True
  1254. trace.stats['mark'] = True
  1255. def afarm(self, context='normal', signoise=False, signoiseQ=False, maxL=False, sigQ=False, broad=False, angle=360, remove=False):
  1256. # def afarm(self, context='normal', signoise=1.1, signoiseQ=None, maxL=0.5, sigQ=True, broad=True, angle=360, remove=True):
  1257. """
  1258. Mark traces and remove them if they fullfill special conditions.
  1259. marked because ...
  1260. signal:
  1261. mark 1: Signal/Noise ratio on signal is bigger than signoise
  1262. mark 2: difference of theoretical azimuth and azimuth calculated by polarisation is bigger than angle
  1263. RF:
  1264. mark 3: maxL * maximum in (-0.5, 0.5)s is smaller than maximum in (3, 20)
  1265. mark 4: signal on Q
  1266. mark 5: signoise on L
  1267. mark 6: signoise on Q
  1268. mark 7: broad signal
  1269. See source code.
  1270. """
  1271. try:
  1272. self.getHI('mark')
  1273. except KeyError:
  1274. self.setHI('mark', False)
  1275. throwed = self.__class__()
  1276. n_farmed = 0
  1277. if context == 'normal':
  1278. self.signoise([-10, 25], [-50, -20])
  1279. for i in range(len(self) // 3)[::-1]:
  1280. st = self[3 * i].stats
  1281. cond1 = st.signoise <= signoise
  1282. cond2 = abs(st.azi - st.lazi) > angle and abs (st.azi - st.lazi) < 360 - angle
  1283. if cond1 or cond2:
  1284. if cond1:
  1285. this_mark = 1
  1286. elif cond2:
  1287. this_mark = 2
  1288. n_farmed += 1
  1289. self[3 * i].stats['mark'] = self[3 * i + 1].stats['mark'] = self[3 * i + 2].stats['mark'] = this_mark
  1290. if remove:
  1291. throwed += self[3 * i:3 * i + 3]
  1292. self.remove(self[3 * i + 2])
  1293. self.remove(self[3 * i + 1])
  1294. self.remove(self[3 * i])
  1295. else:
  1296. self.signoise([0., 5.], [-5, -0.5])
  1297. angle = None
  1298. for i in range(len(self) // 3)[::-1]:
  1299. trL = self[3 * i]
  1300. trQ = self[3 * i + 1]
  1301. pon = trL.stats.ponset
  1302. cond1 = maxL and maxL * abs(trL.slice(pon - 2, pon + 2).max()) <= abs(trL.slice(pon + 3, pon + 20).max())
  1303. cond2 = sigQ and (
  1304. abs(trQ.slice(pon, pon + 10).max()) <= abs(trQ.slice(pon + 10, pon + 20).max())
  1305. or abs(trQ.slice(pon - 1, pon + 1).max()) >= 2 * abs(trQ.slice(pon + 3, pon + 20).max()))
  1306. cond3 = signoise and trL.stats.signoise <= signoise
  1307. cond4 = signoiseQ and trQ.stats.signoise <= signoiseQ
  1308. cond5 = False
  1309. if broad and not (cond1 or cond2 or cond3 or cond4):
  1310. tr = trQ.slice(pon - 5, pon + 20)
  1311. dat = tr.data
  1312. dat_above = dat > 0.2 * max(dat)
  1313. dat_under = dat < 0.2 * min(dat)
  1314. j = 0
  1315. while j < len(dat):
  1316. if dat_above[j] == True:
  1317. length = np.nonzero(dat_above[j:] == False)[0]
  1318. elif dat_under[j] == True:
  1319. length = np.nonzero(dat_under[j:] == False)[0]
  1320. else:
  1321. j += 1
  1322. continue
  1323. if len(length) == 0:
  1324. length = len(dat) - j
  1325. else:
  1326. length = length[0]
  1327. if length / tr.stats.sampling_rate > broad:
  1328. cond5 = True
  1329. break
  1330. j += length
  1331. if cond1 or cond2 or cond3 or cond4 or cond5:
  1332. if cond1:
  1333. this_mark = 3
  1334. elif cond2:
  1335. this_mark = 4
  1336. elif cond3:
  1337. this_mark = 5
  1338. elif cond4:
  1339. this_mark = 6
  1340. elif cond5:
  1341. this_mark = 7
  1342. n_farmed += 1
  1343. trL.stats['mark'] = trQ.stats['mark'] = self[3 * i + 2].stats['mark'] = this_mark
  1344. if remove:
  1345. throwed += self[3 * i:3 * i + 3]
  1346. self.remove(self[3 * i + 2])
  1347. self.remove(self[3 * i + 1])
  1348. self.remove(self[3 * i])
  1349. log.info('Farming - Arguments+Variables: %s' % (util.parameters()))
  1350. log.info('Farming - Farm %s events from stream %s (unmarked %s)' % (n_farmed, self.hash, len(self.select(expr='st.mark==False')) // 3))
  1351. if remove:
  1352. return throwed
  1353. def setHIForHist(self, events, period=24 * 3600):
  1354. start = self[0].stats.starttime + 0.1
  1355. end = self[-1].stats.starttime + 0.1
  1356. hist_list, mag_list = events.produceHist(start, end, period)
  1357. self.setHI('num_events', hist_list)
  1358. self.setHI('max_mag', mag_list)
  1359. def addZeros(self, secs_before, secs_after=None):
  1360. for tr in self:
  1361. tr.addZeros(secs_before, secs_after=secs_after)
  1362. def stretch(self, reftr=None, stretch=None, str_range=0.1, nstr=101, time_windows=None,
  1363. sides='right'):
  1364. from sito.noisexcorr import stretch as stretch_fun
  1365. result = stretch_fun(self, reftr=reftr, stretch=stretch, str_range=str_range, nstr=nstr,
  1366. time_windows=time_windows, sides=sides)
  1367. return result