
Fixes phase follow #8

Open
wants to merge 12 commits into base: main
180 changes: 166 additions & 14 deletions examples/3-event-analysis.ipynb

Large diffs are not rendered by default.

142 changes: 142 additions & 0 deletions examples/4-event-spectrogram.ipynb

Large diffs are not rendered by default.

71 changes: 59 additions & 12 deletions lightguide/blast.py
@@ -1,4 +1,5 @@
from __future__ import annotations
from collections import deque

import logging
from copy import deepcopy
@@ -8,6 +9,7 @@
TYPE_CHECKING,
Any,
Callable,
Deque,
Iterable,
Iterator,
Literal,
@@ -19,14 +21,17 @@
import numpy as np
from matplotlib import colors, dates
from matplotlib.colors import Colormap
from pyrocko import io
from pyrocko import io, marker
from pyrocko.trace import Trace
from scipy import signal
import re

from lightguide.utils import PathStr
from lightguide.models.picks import Picks

from .filters import afk_filter
from .signal import decimation_coefficients
from .spectrogram import get_spectrogram

if TYPE_CHECKING:
from matplotlib import image
@@ -330,6 +335,7 @@ def follow_phase(
window_size: int | tuple[int, int] = 50,
threshold: float = 5e-1,
max_shift: int = 20,
template_stacks: int = 1,
) -> Picks:
"""Follow a phase pick through a Blast.

@@ -340,7 +346,7 @@
2. Calculate the normalized cross-correlation with the downward neighbor.
3. Evaluate maximum x-correlation in allowed window (max_shift).
4. Update template trace and go to 2.

4a. If template_stacks > 1: stack the recent templates before correlating to stabilize the picks.
5. Repeat for upward neighbors.

Args:
@@ -353,6 +359,10 @@
Defaults to 5e-1.
max_shift (int, optional): Maximum allowed shift in samples for
neighboring picks. Defaults to 20.
template_stacks (int, optional): Number of templates to stack for the
correlation. Defaults to 1, i.e. a single trace.
Close to the root template the stack size is limited by the
distance to the root template.

Returns:
Picks: Pick collection holding the channel numbers,
@@ -362,6 +372,7 @@
window_size = (window_size, window_size)

pick_channel -= self.start_channel

root_idx = self._time_to_sample(pick_time)

# Ensure the window is odd-sized with the pick in center.
@@ -372,15 +383,19 @@

pick_channels, pick_times, pick_correlations = [], [], []

def prepare_template(data: np.ndarray) -> np.ndarray:
def prepare_template(data: Deque[np.ndarray]) -> np.ndarray:
stacked = np.mean(data, axis=0)  # average the stacked templates
return stacked * template_taper

def correlate(data: np.ndarray, direction: Literal[1, -1] = 1) -> None:
template = root_template.copy()
index = root_idx
template_stack: Deque[np.ndarray] = deque(
[np.array(template)], maxlen=template_stacks
)

index = root_idx
for ichannel, trace in enumerate(data):
template = prepare_template(template)
template = prepare_template(template_stack)
norm = np.sqrt(np.sum(template**2)) * np.sqrt(np.sum(trace**2))
correlation = np.correlate(trace, template, mode="same")
correlation = np.abs(correlation / norm)
@@ -409,15 +424,19 @@ def correlate(data: np.ndarray, direction: Literal[1, -1] = 1) -> None:
template = trace[
phase_idx - window_size[0] : phase_idx + window_size[1] + 1
].copy()

# append the new template; the deque drops the oldest once template_stacks is reached
template_stack.append(template)
index = phase_idx

correlate(self.data[pick_channel:])
correlate(self.data[: pick_channel - 1][::-1], direction=-1)

pick_channels = np.array(pick_channels) + self.start_channel
pick_correlations = np.array(pick_correlations)
pick_channels = (np.array(pick_channels) + self.start_channel).tolist()

return pick_channels, pick_times, pick_correlations
return Picks(
channel=pick_channels, time=pick_times, correlation=pick_correlations
)
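For orientation, a minimal usage sketch of the stabilized phase following; the pick channel, pick offset and stack depth below are made-up illustration values:

from datetime import timedelta

from lightguide.client import ExampleData

blast = ExampleData.vsp_shot()

# Illustrative pick: channel 60, 0.5 s after the start of the blast.
picks = blast.follow_phase(
    pick_time=blast.start_time + timedelta(seconds=0.5),
    pick_channel=60,
    window_size=50,
    threshold=0.5,
    max_shift=20,
    template_stacks=5,  # correlate against a running mean of up to 5 templates
)

print(picks.channel[:5], picks.time[:5], picks.correlation[:5])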

def taper(self, alpha: float = 0.05) -> None:
"""Taper in time-domain and in-place with a Tukey window.
@@ -486,6 +505,34 @@ def trim_time(self, begin: float = 0.0, end: float = -1.0) -> Blast:
blast.start_time += timedelta(seconds=begin)
return blast

def trim_from_picks(self, picks: Picks, time_window: float = 1.0) -> list[Trace]:
"""Trim each picked channel to a time window after its pick time.

Args:
picks (Picks): Picks holding channel numbers and pick times.
time_window (float, optional): Length of the window after each pick
in seconds. Defaults to 1.0.

Returns:
list[Trace]: The trimmed Pyrocko traces.
"""
traces = self.copy().as_traces()

trimmed_traces = []
for channel, time in zip(picks.channel, picks.time):
timestamp = time.timestamp()

# find the trace for this channel
tr = next((t for t in traces if int(t.station) == channel), None)
if tr is None:
continue

# skip picks outside the time span of the trace
if not tr.time_span[0] <= timestamp <= tr.time_span[1]:
continue

trimmed_traces.append(tr.chop(tmin=timestamp, tmax=timestamp + time_window))

return trimmed_traces
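A short sketch of cutting windows around picks, e.g. those returned by follow_phase above; the one-second window is an assumed value:

# Cut one second of data after each pick; returns Pyrocko traces.
trimmed = blast.trim_from_picks(picks, time_window=1.0)
for tr in trimmed[:3]:
    print(tr.station, tr.tmin, tr.tmax)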

def to_strain(self, detrend: bool = True) -> Blast:
"""Convert the traces to strain.

@@ -590,7 +637,7 @@ def plot(
dates.date2num(self.start_time) if show_date else 0.0,
)

data = self.data.copy()
data = self.data.copy().astype(float)
if normalize_traces:
data /= np.abs(data.max(axis=1, keepdims=True))
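The cast to float above matters because the in-place division fails on integer-typed DAS data; a small sketch of the difference:

import numpy as np

data = np.array([[1, 2, 4]], dtype=np.int32)
# data /= np.abs(data.max(axis=1, keepdims=True))  # raises: cannot cast float result into int array
data = data.astype(float)
data /= np.abs(data.max(axis=1, keepdims=True))    # fine: [[0.25, 0.5, 1.0]]
print(data)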

@@ -671,7 +718,7 @@ def from_pyrocko(cls, traces: list[Trace], channel_spacing: float = 4.0) -> Blas
if not traces:
raise ValueError("Empty list of traces")

traces = sorted(traces, key=lambda tr: int(tr.station))
traces = sorted(traces, key=lambda tr: int(re.sub(r"\D", "", tr.station)))
ntraces = len(traces)

tmin = set()
@@ -702,7 +749,7 @@ def from_pyrocko(cls, traces: list[Trace], channel_spacing: float = 4.0) -> Blas
data=data,
start_time=datetime.fromtimestamp(tmin.pop(), tz=timezone.utc),
sampling_rate=int(1.0 / delta_t.pop()),
start_channel=min(int(tr.station) for tr in traces),
start_channel=min(int(re.sub(r"\D", "", tr.station)) for tr in traces),
channel_spacing=channel_spacing,
)
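The station-name handling above strips any non-digit characters before casting to an integer channel number; a small sketch of that behaviour:

import re

def station_to_channel(station: str) -> int:
    # "DAS00042" -> "00042" -> 42; purely numeric names pass through unchanged.
    return int(re.sub(r"\D", "", station))

assert station_to_channel("00042") == 42
assert station_to_channel("DAS00042") == 42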

@@ -785,7 +832,7 @@ def __len__(self) -> int:
bandpass = shared_function(Blast.bandpass)
afk_filter = shared_function(Blast.afk_filter)
decimate = shared_function(Blast.decimate)

# TODO: check whether these shared functions need to be registered twice
trim_time = shared_function(Blast.trim_time)
trim_channels = shared_function(Blast.trim_channels)

24 changes: 24 additions & 0 deletions lightguide/client.py
@@ -2,6 +2,7 @@

import numpy as np

from pathlib import Path
from lightguide.blast import Blast
from lightguide.utils import cache_dir, download_http

@@ -10,6 +11,8 @@ class ExampleData:
VSPDataUrl = "https://data.pyrocko.org/testing/lightguide/VSP-DAS-G1-120.mseed"
DataUrl = "https://data.pyrocko.org/testing/lightguide/das-data.npy"
EQDataUrl = "https://data.pyrocko.org/testing/lightguide/data-DAS-gfz2020wswf.npy"
IceQDataUrl = "https://github.com/sachalapins/DAS-N2N/raw/main/data/BPT1_UTC_20200117_044207.903.mseed"
# MarkersUrl = "https://data.pyrocko.org/testing/lightguide/markers_VSP-DAS-G1-120.txt"

@classmethod
def earthquake(cls) -> Blast:
@@ -31,3 +34,24 @@ def vsp_shot(cls) -> Blast:
download_http(cls.VSPDataUrl, file)

return Blast.from_miniseed(file, channel_spacing=1.0)

@classmethod
def icequake(cls) -> Blast:
file = cache_dir() / "BPT1_UTC_20200117_044207.903.mseed"
if not file.exists():
download_http(cls.IceQDataUrl, file)

return Blast.from_miniseed(file, channel_spacing=1.0)

@classmethod
def markerfile(cls) -> Path:
file = Path("../examples/markers_VSP-DAS-G1-120.txt")
if not file.exists():
print(
"Marker file has not been created yet. "
"To create a marker file, see example notebook 3."
)
# file = cache_dir() / "markers_VSP-DAS-G1-120.txt"
# download_http(cls.MarkersUrl, file)

return file
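A quick sketch of how the new example-data helpers are meant to be used; the ice-quake record is downloaded on first use and then served from the cache:

from lightguide.client import ExampleData

blast = ExampleData.icequake()
blast.plot()

# Path to the marker file produced in example notebook 3, if it exists.
markers = ExampleData.markerfile()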
51 changes: 51 additions & 0 deletions lightguide/models/picks.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
from __future__ import annotations

from datetime import datetime

from pydantic import BaseModel
from pyrocko import marker


class Picks(BaseModel):
channel: list[int] = []
time: list[datetime] = []
correlation: list[float] = []
kind: list[int] = []

def save_picks(self, filename: str) -> None:
"""Save the picks as a Pyrocko marker file.

Args:
filename (str): Path to the output marker file.
"""
channels = self.channel
times = self.time
kinds = self.kind

if not self.kind:
kinds = [0] * len(channels)

markers = []
for ch, ptime, kind in zip(channels, times, kinds):
nslc_id = [("DAS", "%05d" % ch, "", "")]
tmin = ptime.timestamp()
m = marker.Marker(nslc_ids=nslc_id, tmin=tmin, tmax=tmin, kind=kind)
markers.append(m)
marker.save_markers(markers=markers, filename=filename)

def get_pyrocko_picks(self, filename: str) -> Picks:
"""Load picks from a Pyrocko marker file.

Args:
filename (str): Path to the marker file to read.

Returns:
Picks: The picks read from the marker file.
"""
markers = marker.load_markers(filename)
channels = [m.nslc_ids[0][1] for m in markers]
times = [m.tmin for m in markers]
kinds = [m.kind for m in markers]

return Picks(channel=channels, time=times, kind=kinds)

# def plot ()
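A round-trip sketch of the new Picks model; the file name and pick values are invented for illustration:

from datetime import datetime, timezone

from lightguide.models.picks import Picks

picks = Picks(
    channel=[120, 121, 122],
    time=[
        datetime(2020, 1, 17, 4, 42, 8, tzinfo=timezone.utc),
        datetime(2020, 1, 17, 4, 42, 9, tzinfo=timezone.utc),
        datetime(2020, 1, 17, 4, 42, 10, tzinfo=timezone.utc),
    ],
)

picks.save_picks("picks-markers.txt")  # writes a Pyrocko marker file
reloaded = picks.get_pyrocko_picks("picks-markers.txt")  # reads it back as Picks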