Commit 993ce766 authored by Marius Kriegerowski

same coordinate systems

parent aa0f0675
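
The point of this commit is that both data generators now emit labels in the same local coordinate system: instead of raw lat/lon, event positions are converted to north/east offsets in meters relative to a fixed reference target via pyrocko's orthodrome.latlon_to_ne, plus the source depth. A minimal standalone sketch of that conversion; the helper name and argument layout below are illustrative and not part of the diff:

from pyrocko import orthodrome

def to_local_coords(ref_lat, ref_lon, src_lat, src_lon, src_depth):
    # latlon_to_ne returns (north, east) offsets in meters of the source
    # relative to the reference point, so every label is expressed in one
    # common local frame.
    n, e = orthodrome.latlon_to_ne(ref_lat, ref_lon, src_lat, src_lon)
    return (n, e, src_depth)
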
@@ -14,6 +14,7 @@ import logging
import numpy as num
import os
import glob
import sys
from .tf_util import _FloatFeature, _Int64Feature, _BytesFeature
@@ -21,6 +22,9 @@ pjoin = os.path.join
EPSILON = 1E-4
# TODOS:
# - remove 'double events'
class Noise(Object):
level = Float.T(default=1.)
@@ -55,12 +59,7 @@ class DataGenerator(Object):
def __init__(self, *args, **kwargs):
super(DataGenerator, self).__init__(*args, **kwargs)
self.classes = [
#
# TODO
# https://hanxiao.github.io/2017/07/07/Get-10x-Speedup-in-Tensorflow-Multi-Task-Learning-using-Python-Multiprocessing/
# 'north_shift', 'east_shift', 'depth', 'strike', 'dip', 'rake']
'north_shift', 'east_shift', 'depth']
self.classes = ['north_shift', 'east_shift', 'depth']
self.n_classes = len(self.classes)
self.setup()
@@ -68,7 +67,10 @@ class DataGenerator(Object):
pass
def extract_labels(self, source):
return [getattr(source, classname) for classname in self.classes]
n, e = orthodrome.latlon_to_ne(
self.reference_target.lat, self.reference_target.lon,
source.lat, source.lon)
return (n, e, source.depth)
def get_raw_data_chunk(self):
'''
@@ -107,6 +109,9 @@ class DataGenerator(Object):
istart_trace: min(data_len, self.n_samples_max-istart_array)+istart_trace]
chunk[i, istart_array: istart_array+ydata.shape[0]] += ydata
chunk -= num.min(chunk)
chunk /= num.max(chunk)
@property
def tensor_shape(self):
return self._shape
@@ -145,12 +150,12 @@ class PileData(DataGenerator):
def setup(self):
# TODO convert to north_shift, east_shift according to seismosizer data grid
self.classes = ['lat', 'lon', 'depth']
self.classes = ['north_shift', 'east_shift', 'depth']
self.data_pile = pile.make_pile(self.data_path, fileformat=self.data_format)
if self.data_pile.is_empty():
sys.exit('Data pile is empty!')
markers = marker.load_markers(self.fn_markers)
marker.associate_phases_to_events(markers)
markers.sort(key=lambda x: x.tmin)
markers_by_nsl = {}
for m in markers:
@@ -163,6 +168,7 @@ class PileData(DataGenerator):
assert(len(markers_by_nsl) == 1)
self.markers = list(markers_by_nsl.values())[0]
self.markers.sort(key=lambda x: x.tmin)
self.deltat_want = self.deltat_want or min(self.data_pile.deltats.keys())
self.channels = list(self.data_pile.nslc_ids.keys())
@@ -187,6 +193,7 @@ class PileData(DataGenerator):
if event is None:
logging.debug('No event: %s' % m)
continue
for trs in self.data_pile.chopper(
tmin=m.tmin, tmax=m.tmin+tr_len, keep_current_files_open=True):
_trs = []
@@ -203,7 +210,6 @@ class PileData(DataGenerator):
yield chunk, self.extract_labels(event)
class TFRecordData(DataGenerator):
# NOT TESTED YET
@@ -229,7 +235,6 @@ class OnTheFlyData(DataGenerator):
stations = load_stations(self.fn_stations)
self.targets = synthi.guess_targets_from_stations(
stations, quantity=self.quantity)
self.tensor_shape = (len(self.targets), self.n_samples_max)
def make_data_chunk(self, source, results, store):
@@ -244,11 +249,15 @@ class OnTheFlyData(DataGenerator):
traces = [result.trace for result in results]
self.fit_data_into_chunk(traces, ydata_stacked, tref=tref)
ydata_stacked -= num.min(ydata_stacked)
ydata_stacked /= num.max(ydata_stacked)
return ydata_stacked
def extract_labels(self, source):
elat, elon = source.effective_latlon
n, e = orthodrome.latlon_to_ne(
self.reference_target.lat, self.reference_target.lon,
elat, elon)
return (n, e, source.depth)
def generate(self):
swarm = synthi.setup(self.gf_engine, self.n_sources)
......
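
Besides the coordinate change, the diff also normalizes each waveform chunk in place before it is used: subtract the minimum, then divide by the maximum, once in DataGenerator's chunk assembly and once in OnTheFlyData.make_data_chunk. A small NumPy sketch of that min-max scaling; the EPSILON guard against an all-constant chunk is an assumption here and does not appear in the diff, even though the module defines such a constant:

import numpy as num

EPSILON = 1E-4

def normalize_chunk(chunk):
    # Shift the minimum to zero, then scale the peak to (roughly) one.
    chunk = chunk - num.min(chunk)
    chunk = chunk / (num.max(chunk) + EPSILON)  # EPSILON guard: assumption, not in the diff
    return chunk
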