{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"from glob import glob\n",
"import pandas as pd\n",
"import obspy\n",
"from obspy import read_inventory\n",
"import os\n",
"from obspy import UTCDateTime\n",
"from obspy.geodetics import gps2dist_azimuth\n",
"def distcal(stla,stlo,evla,evlo):\n",
" return gps2dist_azimuth(stla,stlo,evla,evlo)[0]/1000\n",
"import numpy as np\n",
"from scipy.optimize import curve_fit\n",
"from obspy.taup import TauPyModel\n",
"model = TauPyModel(model=\"iasp91\")\n",
"import matplotlib.pyplot as plt"
]
},
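{
"cell_type": "markdown",
"metadata": {},
"source": [
"A quick sanity check of `distcal` (a minimal sketch; the coordinates below are arbitrary illustration values, not taken from the catalogue). `gps2dist_azimuth` returns a tuple `(distance in m, azimuth, back-azimuth)`, and `distcal` converts the first element to km."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Hypothetical station/event coordinates, for illustration only\n",
"stla, stlo = 21.048, 105.800  # station (lat, lon)\n",
"evla, evlo = 20.500, 104.900  # event epicentre (lat, lon)\n",
"\n",
"dist_m, az, baz = gps2dist_azimuth(stla, stlo, evla, evlo)\n",
"print(\"distance: {:.1f} km, azimuth: {:.1f} deg\".format(dist_m / 1000, az))\n",
"print(\"distcal:  {:.1f} km\".format(distcal(stla, stlo, evla, evlo)))"
]
},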
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"def get_disp(tr):\n",
" \"\"\"Integrate acceleration to displacement.\n",
" Args:\n",
" tr (StationTrace):\n",
" Trace of acceleration data. This is the trace where the Cache values will\n",
" be set.\n",
" config (dict):\n",
" Configuration dictionary (or None). See get_config().\n",
" Returns:\n",
" StationTrace.\n",
" \"\"\"\n",
" acc = tr.copy()\n",
" try:\n",
" disp = acc.integrate().integrate()\n",
" except Exception as e:\n",
" raise e\n",
" return disp\n",
"\n",
"def correct_baseline(trace):\n",
" \"\"\"\n",
" Performs a baseline correction following the method of Ancheta\n",
" et al. (2013). This removes low-frequency, non-physical trends\n",
" that remain in the time series following filtering.\n",
" Args:\n",
" trace (obspy.core.trace.Trace):\n",
" Trace of strong motion data.\n",
" config (dict):\n",
" Configuration dictionary (or None). See get_config().\n",
" Returns:\n",
" trace: Baseline-corrected trace.\n",
" \"\"\"\n",
" # Integrate twice to get the displacement time series\n",
" disp = get_disp(trace)\n",
"\n",
" # Fit a sixth order polynomial to displacement time series, requiring\n",
" # that the 1st and 0th order coefficients are zero\n",
" time_values = (\n",
" np.linspace(0, trace.stats.npts - 1, trace.stats.npts) * trace.stats.delta\n",
" )\n",
" poly_cofs = list(curve_fit(_poly_func, time_values, disp.data)[0])\n",
" poly_cofs += [0, 0]\n",
"\n",
" # Construct a polynomial from the coefficients and compute\n",
" # the second derivative\n",
" polynomial = np.poly1d(poly_cofs)\n",
" polynomial_second_derivative = np.polyder(polynomial, 2)\n",
"\n",
" # Subtract the second derivative of the polynomial from the\n",
" # acceleration trace\n",
" trace.data -= polynomial_second_derivative(time_values)\n",
"\n",
" return trace\n",
"\n",
"def _poly_func(x, a, b, c, d, e):\n",
" \"\"\"\n",
" Model polynomial function for polynomial baseline correction.\n",
" \"\"\"\n",
" return a * x ** 6 + b * x ** 5 + c * x ** 4 + d * x ** 3 + e * x ** 2"
]
},
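{
"cell_type": "markdown",
"metadata": {},
"source": [
"A minimal synthetic check of `correct_baseline` (illustration only, not part of the original processing): build a trace whose \"acceleration\" is a pure low-frequency polynomial drift, correct it, and confirm the residual is small. Because the drift integrates to a polynomial that the order-2-to-6 model fits exactly, the correction should remove it almost completely."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Synthetic 100 Hz, 60 s acceleration trace containing only a slow drift\n",
"npts, dt = 6000, 0.01\n",
"t = np.arange(npts) * dt\n",
"drift = 1e-6 * t ** 2  # non-physical low-frequency trend\n",
"\n",
"tr_syn = obspy.Trace(data=drift.astype(np.float64))\n",
"tr_syn.stats.delta = dt\n",
"\n",
"tr_corr = correct_baseline(tr_syn.copy())\n",
"print(\"max |acc| before: {:.3e}\".format(np.abs(drift).max()))\n",
"print(\"max |acc| after:  {:.3e}\".format(np.abs(tr_corr.data).max()))"
]
},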
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"def add0(s):\n",
" if len(str(s)) < 2:\n",
" return \"0\" + str(s)\n",
" else:\n",
" return str(s)\n",
"\n",
"def add00(s):\n",
" if len(str(s)) == 1:\n",
" return \"00\" + str(s)\n",
" elif len(str(s)) == 2: \n",
" return \"0\" + str(s)\n",
" else:\n",
" return str(s)"
]
},
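{
"cell_type": "markdown",
"metadata": {},
"source": [
"Quick usage of the zero-padding helpers (equivalent to `str.zfill`), e.g. for building fixed-width date fields:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"print(add0(3), add0(11))                # 03 11\n",
"print(add00(7), add00(42), add00(123))  # 007 042 123"
]
},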
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"def convert_utc(yy,mm,dd,hh,mi,s):\n",
" return UTCDateTime(int(yy), int(mm), int(dd), int(hh), int(mi), int(s))\n",
" "
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"respf = \"/home/nghianc/auto_pick/instrument_response/XML\"\n",
"eqcat = pd.read_csv(\"vn_cat.csv\",index_col=0)\n",
"evl = sorted([a.split(\"_\")[-2] for a in glob(\"../sac_data/*\")])\n",
"eqcat['utctime'] = eqcat.apply(lambda x: convert_utc(x.year, x.month, x.day, x.hour, x.minute, x.second),axis=1)\n"
]
},
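{
"cell_type": "markdown",
"metadata": {},
"source": [
"A minimal sketch of the `utctime` construction on a hand-made catalogue fragment. The column names (`year` ... `second`) follow the `apply` call above; the values are invented for illustration."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Invented single-event catalogue fragment, for illustration only\n",
"demo = pd.DataFrame([{\"year\": 2020, \"month\": 7, \"day\": 25,\n",
"                      \"hour\": 12, \"minute\": 14, \"second\": 51}])\n",
"demo['utctime'] = demo.apply(lambda x: convert_utc(x.year, x.month, x.day,\n",
"                                                   x.hour, x.minute, x.second), axis=1)\n",
"print(demo['utctime'][0])  # 2020-07-25T12:14:51.000000Z"
]
},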
{
"cell_type": "code",
"execution_count": 30,
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
]
}
],
"source": [
"for i in range(0,len(evl)):\n",
"# for i in range(0,1):\n",
" ev = evl[i]\n",
" t0 = UTCDateTime(evl[i])\n",
" print(\"Read event {}\".format(evl[i]))\n",
" dfinfo = eqcat[eqcat['utctime'] == UTCDateTime(evl[i])].reset_index(drop=True)\n",
" sacl = glob(\"../sac_data/*{}*/*\".format(evl[i]))\n",
" stal = list(set([a.split(\"/\")[-1].split(\".\")[0] for a in sacl]))\n",
" fs,nf = [],[]\n",
" for sta in stal:\n",
" try:\n",
" inv = read_inventory(os.path.join(respf,\"VN.{}.xml\".format(sta)))\n",
" # print(\"Found station {}\".format(sta))\n",
" fs.append(sta)\n",
" except:\n",
" print(\"Station {} not found\".format(sta))\n",
" nf.append(sta)\n",
" pass\n",
" sacpr = [a for a in [a for a in sacl if a.split(\"/\")[-1].split(\".\")[0] in fs] if \"HHN\" in a]\n",
" sacpr = [a.replace(\"HHN\",\"HH*\") for a in sacpr]\n",
" # for sacfile in sacpr:\n",
" for file in sacpr:\n",
" print(ev,UTCDateTime(t0))\n",
" print(\"Read station {} - event {}\".format(sta,ev))\n",
" time = t0\n",
" julday = time.julday\n",
" hhmm = time.strftime(format=\"%H%M\")\n",
" year = time.year\n",
" sta = file.split(\"/\")[-1].split(\".\")[0]\n",
" inv = read_inventory(os.path.join(respf,\"VN.{}.xml\".format(sta)))\n",
" for chan in inv[0][0]:\n",
" chan.start_date = UTCDateTime(2000,1,1)\n",
" chan.end_date = UTCDateTime(2025,1,1)\n",
" try:\n",
"\n",
" st = obspy.read(file)\n",
" sr = 100\n",
" trz = st.select(component=\"Z\")[0].interpolate(sampling_rate=sr)\n",
" tre = st.select(component=\"E\")[0].interpolate(sampling_rate=sr)\n",
" trn = st.select(component=\"N\")[0].interpolate(sampling_rate=sr)\n",
" pre_filt = (0.005, 0.006, 30.0, 35.0)\n",
" stla = \"{:.3f}\".format(inv[0][0].latitude)\n",
" stlo = \"{:.3f}\".format(inv[0][0].longitude)\n",
" stel = \"{:.3f}\".format(inv[0][0].elevation)\n",
" evla = \"{:.3f}\".format(dfinfo.Lat[0])\n",
" evlo = \"{:.3f}\".format(dfinfo.Lon[0])\n",
" evdp = \"{:.3f}\".format(dfinfo.Depth[0])\n",
" mag = \"{:.1f}\".format(dfinfo.Ml[0])\n",
" dis = distcal(float(stla),float(stlo),float(evla),float(evlo))\n",
" t0 = UTCDateTime(evl[i])\n",
" ml = mag\n",
" if dis < 350:\n",
" for tr in [trz,tre,trn]:\n",
" tr.detrend(\"demean\")\n",
" tr = correct_baseline(tr)\n",
" tr.stats.channel = tr.stats.channel.replace(\"HH \",\"HH\")\n",
" tr.remove_response(inventory=inv, pre_filt=pre_filt, output=\"VEL\",\n",
" water_level=60)\n",
"\n",
" tr.trim(t0,t0+180)\n",
" tr.stats.sac[\"stla\"] = float(stla)\n",
" tr.stats.sac[\"stlo\"] = float(stlo)\n",
" tr.stats.sac[\"stel\"] = float(stel)\n",
" tr.stats.sac[\"mag\"] = float(mag)\n",
" tr.stats.sac[\"o\"] = 0\n",
" tr.stats.sac['evla'] = float(evla)\n",
" tr.stats.sac['evlo'] = float(evlo)\n",
" tr.stats.sac['evdp'] = float(evdp)\n",
" tr.stats.sac['kcmpnm'] = tr.stats.sac['kcmpnm'].replace(\"HH \",\"HH\")\n",
" chan = tr.stats.channel\n",
" network = \"VN\"\n",
" pathvel = os.path.join(\"vel\",ev)\n",
" if not os.path.exists(pathvel):\n",
" os.makedirs(pathvel)\n",
" filevel = os.path.join(pathvel,\"{}.{}.{}.{}.VEL\".format(network,sta,ev,chan))\n",
" tr.write(filevel,format=\"SAC\")\n",
"\n",
" path = os.path.join(\"MW\",ev)\n",
" if not os.path.exists(path):\n",
" os.makedirs(path)\n",
" filepath = os.path.join(path,\"{}.{}.{}.{}.{}.{}.sac\".format(network,sta,chan,year,julday,hhmm))\n",
" tr.write(filepath)\n",
" except:\n",
" print(\"error\")\n",
"# raise"
]
},
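{
"cell_type": "markdown",
"metadata": {},
"source": [
"The core of the loop above is `remove_response`. As a self-contained sketch, ObsPy's bundled example waveform and inventory (network BW, station RJOB -- not the VN data used above) can demonstrate the same deconvolution settings; calling `read()`/`read_inventory()` with no arguments is assumed to load those examples."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# ObsPy's example stream and a matching example inventory\n",
"st_ex = obspy.read()\n",
"inv_ex = read_inventory()\n",
"\n",
"pre_filt = (0.005, 0.006, 30.0, 35.0)  # same corners as in the loop above\n",
"st_ex.remove_response(inventory=inv_ex, pre_filt=pre_filt,\n",
"                      output=\"VEL\", water_level=60)\n",
"print(st_ex)"
]
},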
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.6"
}
},
"nbformat": 4,
"nbformat_minor": 4
}