{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"# Make Network File\n",
"\n",
"In this notebook, we compile the seismic network metadata that will be used recurrently through the workflow. We also scan the whole preprocessed data set to measure the daily data availability, which is essential for estimating the detection capability of the network at a given time."
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import fnmatch\n",
"import glob\n",
"import pandas as pd\n",
"import os\n",
"import sys\n",
"\n",
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import obspy as obs\n",
"\n",
"from BPMF.config import cfg\n",
"from BPMF.dataset import Network\n",
"from matplotlib.ticker import FixedLocator, FormatStrFormatter"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"# output file names; both files are written to cfg.NETWORK_PATH later in this notebook\n",
"NETWORK_FILENAME = \"network.csv\"\n",
"AVAILABILITY_FILENAME = \"availability.csv\"\n",
"# name of the per-day folder holding the preprocessed waveforms, derived from\n",
"# the frequency band (MIN_FREQ_HZ/MAX_FREQ_HZ) defined in the BPMF config\n",
"preproc_folder_name = f\"preprocessed_{cfg.MIN_FREQ_HZ:.0f}_{cfg.MAX_FREQ_HZ:.0f}\""
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"DatetimeIndex(['2012-07-26'], dtype='datetime64[ns]', freq='D')"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# we check the station metadata and data availability between START_DATE and END_DATE\n",
"# these also define the start and end of the experiment\n",
"START_DATE = \"2012-07-26\"\n",
"END_DATE = \"2012-07-26\"\n",
"# one entry per calendar day, inclusive of both endpoints\n",
"datelist = pd.date_range(start=START_DATE, end=END_DATE)\n",
"datelist"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"# initialize data frames\n",
"# daily_availability: one row per day, one column per station id, filled in\n",
"# the next cell with the number of channel files found on that day\n",
"daily_availability = pd.DataFrame()\n",
"# network_metadata: one row per station id with its codes and coordinates\n",
"network_metadata = pd.DataFrame(\n",
"    columns=[\"network_code\", \"station_code\", \"longitude\", \"latitude\", \"elevation_m\"]\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"# even though this tutorial analyzes a single day, this notebook is written for an extended\n",
"# data base following the folder tree convention used here\n",
"for date in datelist:\n",
"    row_name = date.strftime(\"%Y-%m-%d\")\n",
"    data_folder = os.path.join(\n",
"        cfg.INPUT_PATH, str(date.year), date.strftime(\"%Y%m%d\"), preproc_folder_name\n",
"    )\n",
"    resp_folder = os.path.join(\n",
"        cfg.INPUT_PATH, str(date.year), date.strftime(\"%Y%m%d\"), \"resp\"\n",
"    )\n",
"    data_filenames = glob.glob(os.path.join(data_folder, \"*mseed\"))\n",
"    # stations observed on this particular day\n",
"    daily_network_metadata = pd.DataFrame(\n",
"        columns=[\"network_code\", \"station_code\", \"longitude\", \"latitude\", \"elevation_m\"]\n",
"    )\n",
"    for fname in data_filenames:\n",
"        # we are only interested in the filename, not the entire path\n",
"        fname = os.path.basename(fname)\n",
"        # the filename encodes the channel id: net.sta.loc.cha_suffix.mseed\n",
"        net_code, sta_code, loc_code, cha_code, ext = fname.split(\".\")\n",
"        cha_code = cha_code[: cha_code.find(\"_\")]\n",
"        daily_network_metadata.loc[\n",
"            f\"{net_code}.{sta_code}\", [\"network_code\", \"station_code\"]\n",
"        ] = [net_code, sta_code]\n",
"\n",
"    for sta_id in daily_network_metadata.index:\n",
"        # count the number of channels associated with sta_id\n",
"        channels = fnmatch.filter(data_filenames, f\"*{sta_id}.*mseed\")\n",
"        daily_availability.loc[row_name, sta_id] = len(channels)\n",
"        if sta_id not in network_metadata.index:\n",
"            # read the station coordinates from the response file only once,\n",
"            # the first day this station is encountered\n",
"            station_inv = obs.read_inventory(\n",
"                os.path.join(resp_folder, f\"{sta_id}.xml\")\n",
"            )[0][0]\n",
"            daily_network_metadata.loc[\n",
"                sta_id, [\"longitude\", \"latitude\", \"elevation_m\"]\n",
"            ] = [station_inv.longitude, station_inv.latitude, station_inv.elevation]\n",
"    # only append stations that are new to network_metadata: a station already\n",
"    # seen on a previous day was left with NaN coordinates above, and such a\n",
"    # NaN row is not removed by drop_duplicates() because it differs from the\n",
"    # populated row, which would duplicate station ids over multi-day runs\n",
"    is_new_station = ~daily_network_metadata.index.isin(network_metadata.index)\n",
"    network_metadata = pd.concat(\n",
"        [network_metadata, daily_network_metadata.loc[is_new_station]]\n",
"    )\n"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"
\n",
"\n",
"
\n",
" \n",
" \n",
" | \n",
" network_code | \n",
" station_code | \n",
" longitude | \n",
" latitude | \n",
" elevation_m | \n",
"
\n",
" \n",
" \n",
" \n",
" YH.DE07 | \n",
" YH | \n",
" DE07 | \n",
" 30.411539 | \n",
" 40.679661 | \n",
" 40.0 | \n",
"
\n",
" \n",
" YH.DC07 | \n",
" YH | \n",
" DC07 | \n",
" 30.24217 | \n",
" 40.66708 | \n",
" 164.0 | \n",
"
\n",
" \n",
" YH.DC08 | \n",
" YH | \n",
" DC08 | \n",
" 30.25013 | \n",
" 40.744438 | \n",
" 162.0 | \n",
"
\n",
" \n",
" YH.DE08 | \n",
" YH | \n",
" DE08 | \n",
" 30.406469 | \n",
" 40.748562 | \n",
" 31.0 | \n",
"
\n",
" \n",
" YH.SPNC | \n",
" YH | \n",
" SPNC | \n",
" 30.3083 | \n",
" 40.686001 | \n",
" 190.0 | \n",
"
\n",
" \n",
" YH.DC06 | \n",
" YH | \n",
" DC06 | \n",
" 30.265751 | \n",
" 40.616718 | \n",
" 555.0 | \n",
"
\n",
" \n",
" YH.SAUV | \n",
" YH | \n",
" SAUV | \n",
" 30.3272 | \n",
" 40.7402 | \n",
" 170.0 | \n",
"
\n",
" \n",
" YH.DD06 | \n",
" YH | \n",
" DD06 | \n",
" 30.31777 | \n",
" 40.623539 | \n",
" 182.0 | \n",
"
\n",
" \n",
"
\n",
"
"
],
"text/plain": [
" network_code station_code longitude latitude elevation_m\n",
"YH.DE07 YH DE07 30.411539 40.679661 40.0\n",
"YH.DC07 YH DC07 30.24217 40.66708 164.0\n",
"YH.DC08 YH DC08 30.25013 40.744438 162.0\n",
"YH.DE08 YH DE08 30.406469 40.748562 31.0\n",
"YH.SPNC YH SPNC 30.3083 40.686001 190.0\n",
"YH.DC06 YH DC06 30.265751 40.616718 555.0\n",
"YH.SAUV YH SAUV 30.3272 40.7402 170.0\n",
"YH.DD06 YH DD06 30.31777 40.623539 182.0"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# compiled station metadata, indexed by station id (net.sta)\n",
"network_metadata"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"\n",
"\n",
"
\n",
" \n",
" \n",
" | \n",
" YH.DE07 | \n",
" YH.DC07 | \n",
" YH.DC08 | \n",
" YH.DE08 | \n",
" YH.SPNC | \n",
" YH.DC06 | \n",
" YH.SAUV | \n",
" YH.DD06 | \n",
"
\n",
" \n",
" \n",
" \n",
" 2012-07-26 | \n",
" 3.0 | \n",
" 3.0 | \n",
" 3.0 | \n",
" 3.0 | \n",
" 3.0 | \n",
" 3.0 | \n",
" 3.0 | \n",
" 3.0 | \n",
"
\n",
" \n",
"
\n",
"
"
],
"text/plain": [
" YH.DE07 YH.DC07 YH.DC08 YH.DE08 YH.SPNC YH.DC06 YH.SAUV \\\n",
"2012-07-26 3.0 3.0 3.0 3.0 3.0 3.0 3.0 \n",
"\n",
" YH.DD06 \n",
"2012-07-26 3.0 "
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# number of channel files found per station (columns) and per day (rows)\n",
"daily_availability"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"## Save the network metadata and data availability"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"../network/network.csv\n"
]
}
],
"source": [
"network_metadata.index.name = \"station_id\"\n",
"network_filepath = os.path.join(cfg.NETWORK_PATH, NETWORK_FILENAME)\n",
"print(network_filepath)\n",
"network_metadata.to_csv(network_filepath, sep=\"\\t\")\n",
"# prepend two header lines to the network file:\n",
"# 1) the start and end dates of the experiment\n",
"# 2) the name of the components used on each station\n",
"# note: the list of components will be used to broadcast\n",
"# network waveforms into a single numpy.ndarray, so even\n",
"# if some stations only have one component we need to \n",
"# fill their missing components with zeros in order to\n",
"# keep consistent data dimensions across stations\n",
"with open(network_filepath, \"r\") as fnet:\n",
"    content = fnet.read()\n",
"with open(network_filepath, \"w\") as fnet:\n",
"    fnet.write(f\"{START_DATE}\\t{END_DATE}\\n\")\n",
"    fnet.write(\"N\\tE\\tZ\\n\")\n",
"    fnet.write(content)\n",
"daily_availability.to_csv(os.path.join(cfg.NETWORK_PATH, AVAILABILITY_FILENAME))"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"## Test reading the new network file with `BPMF.dataset.Network`\n",
"\n",
"The csv file with network metadata is meant to be read into an instance of `BPMF.dataset.Network`."
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"# read the csv file we just wrote back into a BPMF Network instance\n",
"net = Network(NETWORK_FILENAME)\n",
"net.read()"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array(['DE07', 'DC07', 'DC08', 'DE08', 'SPNC', 'DC06', 'SAUV', 'DD06'],\n",
" dtype='\n",
"\n",
"\n",
" \n",
" \n",
" | \n",
" DE07 | \n",
" DC07 | \n",
" DC08 | \n",
" DE08 | \n",
" SPNC | \n",
" DC06 | \n",
" SAUV | \n",
" DD06 | \n",
"
\n",
" \n",
" \n",
" \n",
" DE07 | \n",
" 0.000000 | \n",
" 14.388528 | \n",
" 15.420268 | \n",
" 7.663334 | \n",
" 8.757172 | \n",
" 14.183476 | \n",
" 9.798210 | \n",
" 10.087409 | \n",
"
\n",
" \n",
" DC07 | \n",
" 14.388528 | \n",
" 0.000000 | \n",
" 8.616766 | \n",
" 16.572942 | \n",
" 5.972804 | \n",
" 5.950485 | \n",
" 10.842966 | \n",
" 8.016817 | \n",
"
\n",
" \n",
" DC08 | \n",
" 15.420268 | \n",
" 8.616766 | \n",
" 0.000000 | \n",
" 13.212465 | \n",
" 8.140696 | \n",
" 14.249786 | \n",
" 6.526482 | \n",
" 14.592524 | \n",
"
\n",
" \n",
" DE08 | \n",
" 7.663334 | \n",
" 16.572942 | \n",
" 13.212465 | \n",
" 0.000000 | \n",
" 10.820888 | \n",
" 18.871834 | \n",
" 6.760531 | \n",
" 15.779586 | \n",
"
\n",
" \n",
" SPNC | \n",
" 8.757172 | \n",
" 5.972804 | \n",
" 8.140696 | \n",
" 10.820888 | \n",
" 0.000000 | \n",
" 8.501549 | \n",
" 6.227020 | \n",
" 6.982323 | \n",
"
\n",
" \n",
" DC06 | \n",
" 14.183476 | \n",
" 5.950485 | \n",
" 14.249786 | \n",
" 18.871834 | \n",
" 8.501549 | \n",
" 0.000000 | \n",
" 14.668558 | \n",
" 4.481904 | \n",
"
\n",
" \n",
" SAUV | \n",
" 9.798210 | \n",
" 10.842966 | \n",
" 6.526482 | \n",
" 6.760531 | \n",
" 6.227020 | \n",
" 14.668558 | \n",
" 0.000000 | \n",
" 12.979454 | \n",
"
\n",
" \n",
" DD06 | \n",
" 10.087409 | \n",
" 8.016817 | \n",
" 14.592524 | \n",
" 15.779586 | \n",
" 6.982323 | \n",
" 4.481904 | \n",
" 12.979454 | \n",
" 0.000000 | \n",
"
\n",
" \n",
"
\n",
""
],
"text/plain": [
" DE07 DC07 DC08 DE08 SPNC DC06 \\\n",
"DE07 0.000000 14.388528 15.420268 7.663334 8.757172 14.183476 \n",
"DC07 14.388528 0.000000 8.616766 16.572942 5.972804 5.950485 \n",
"DC08 15.420268 8.616766 0.000000 13.212465 8.140696 14.249786 \n",
"DE08 7.663334 16.572942 13.212465 0.000000 10.820888 18.871834 \n",
"SPNC 8.757172 5.972804 8.140696 10.820888 0.000000 8.501549 \n",
"DC06 14.183476 5.950485 14.249786 18.871834 8.501549 0.000000 \n",
"SAUV 9.798210 10.842966 6.526482 6.760531 6.227020 14.668558 \n",
"DD06 10.087409 8.016817 14.592524 15.779586 6.982323 4.481904 \n",
"\n",
" SAUV DD06 \n",
"DE07 9.798210 10.087409 \n",
"DC07 10.842966 8.016817 \n",
"DC08 6.526482 14.592524 \n",
"DE08 6.760531 15.779586 \n",
"SPNC 6.227020 6.982323 \n",
"DC06 14.668558 4.481904 \n",
"SAUV 0.000000 12.979454 \n",
"DD06 12.979454 0.000000 "
]
},
"execution_count": 11,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# station-by-station matrix of inter-station distances, in km\n",
"net.interstation_distances"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [
{
"data": {
"image/svg+xml": [
"\n",
"\n",
"\n"
],
"text/plain": [
""
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"# render inline figures as svg for crisp, scalable display\n",
"%config InlineBackend.figure_formats = [\"svg\"]\n",
"\n",
"# plot a simple map with the station locations\n",
"fig = net.plot_map()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3.10.4 ('hy7_py310')",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.4"
},
"orig_nbformat": 4,
"vscode": {
"interpreter": {
"hash": "221f0e5b1b98151b07a79bf3b6d0c1d306576197d2c4531763770570a29e708e"
}
}
},
"nbformat": 4,
"nbformat_minor": 2
}