Skip to content

Commit 1a718b2

Browse files
authored
Merge pull request #14 from oceanmodeling/fixes
fixes
2 parents 5194dfe + a29c948 commit 1a718b2

File tree

10 files changed

+2609
-2693
lines changed

10 files changed

+2609
-2693
lines changed

.github/workflows/run_tests.yml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -37,11 +37,11 @@ jobs:
3737
shell: "bash -eo pipefail {0}"
3838

3939
steps:
40-
- uses: "actions/checkout@v2"
41-
- uses: "actions/setup-python@v2"
40+
- uses: "actions/checkout@v3"
41+
- uses: "actions/setup-python@v3"
4242
with:
4343
python-version: "${{ matrix.python }}"
44-
- uses: "actions/cache@v2"
44+
- uses: "actions/cache@v3"
4545
id: "cache"
4646
with:
4747
path: "${{ env.pythonLocation }}"

.pre-commit-config.yaml

Lines changed: 12 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ fail_fast: false
66

77
repos:
88
- repo: "https://github.com/pre-commit/pre-commit-hooks"
9-
rev: "v4.5.0"
9+
rev: "v5.0.0"
1010
hooks:
1111
- id: "check-added-large-files"
1212
args: ["--maxkb=40000"]
@@ -34,21 +34,21 @@ repos:
3434
- id: "python-use-type-annotations"
3535

3636
- repo: "https://github.com/asottile/reorder_python_imports"
37-
rev: "v3.12.0"
37+
rev: "v3.14.0"
3838
hooks:
3939
- id: "reorder-python-imports"
4040
args:
4141
- "--py38-plus"
4242

4343
- repo: "https://github.com/astral-sh/ruff-pre-commit"
4444
# Ruff version.
45-
rev: "v0.1.7"
45+
rev: "v0.11.7"
4646
hooks:
4747
- id: "ruff"
4848
- id: "ruff-format"
4949

5050
- repo: "https://github.com/kynan/nbstripout"
51-
rev: "0.6.1"
51+
rev: "0.8.1"
5252
hooks:
5353
- id: "nbstripout"
5454

@@ -63,33 +63,11 @@ repos:
6363
- id: "poetry-lock"
6464
name: "poetry lock --no-update"
6565
args: ["--no-update"]
66-
- id: "poetry-export"
67-
name: "poetry export main"
68-
args:
69-
[
70-
"--only",
71-
"main",
72-
"-f",
73-
"requirements.txt",
74-
"-o",
75-
"requirements/requirements.txt",
76-
]
77-
- id: "poetry-export"
78-
name: "poetry export dev"
79-
args:
80-
[
81-
"--with",
82-
"dev",
83-
"-f",
84-
"requirements.txt",
85-
"-o",
86-
"requirements/requirements-dev.txt",
87-
]
88-
89-
- repo: "local"
90-
hooks:
91-
- id: "mypy"
92-
name: "mypy"
93-
entry: "make mypy"
94-
language: "system"
95-
types: ["python"]
66+
- id: poetry-export
67+
args: ["-f", "requirements.txt", "-o", "requirements/requirements.txt"]
68+
verbose: true
69+
pass_filenames: false
70+
- id: poetry-export
71+
args: ["--dev", "-f", "requirements.txt", "-o", "requirements/requirements-dev.txt"]
72+
verbose: true
73+
pass_filenames: false

README.md

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
[![Available on pypi](https://img.shields.io/pypi/v/seastats.svg)](https://pypi.python.org/pypi/seastats/)
2+
[![Conda Version](https://img.shields.io/conda/vn/conda-forge/seastats.svg)](https://anaconda.org/conda-forge/seastats)
23
[![CI](https://github.com/oceanmodeling/seastats/actions/workflows/run_tests.yml/badge.svg)](https://github.com/oceanmodeling/seastats/actions/workflows/run_tests.yml)
34

45
# SeaStats
@@ -15,7 +16,7 @@ def get_stats(
1516
obs: Series,
1617
metrics: Sequence[str] = SUGGESTED_METRICS,
1718
quantile: float = 0,
18-
cluster: int = 72,
19+
cluster: int = 24,
1920
round: int = -1
2021
) -> dict[str, float]
2122
```
@@ -54,7 +55,7 @@ Returns a dictionary containing the calculated metrics and their corresponding v
5455
* `mae`: Mean Absolute Error
5556
* `mse`: Mean Square Error
5657
* `nse`: Nash-Sutcliffe Efficiency
57-
* `lamba`: Lambda index
58+
* `lambda`: Lambda index
5859
* `cr`: Pearson Correlation coefficient
5960
* `cr_95`: Pearson Correlation coefficient for data points above 95th percentile
6061
* `slope`: Slope of Model/Obs correlation
@@ -140,7 +141,7 @@ with:
140141
NB: the function uses [pyextremes](https://georgebv.github.io/pyextremes/quickstart/) in the background, with PoT method, using the `quantile` value of the observed signal as physical threshold and passes the `cluster_duration` argument.
141142

142143

143-
this happens when the function `storms/match_extremes.py` couldn't finc concomitent storms for the observed and modeled time series.
144+
this happens when the function `storms/match_extremes.py` couldn't find concomitant storms for the observed and modeled time series.
144145

145146
## Usage
146147
see [notebook](/notebooks/example_abed.ipynb) for details
@@ -153,6 +154,6 @@ storm = get_stats(sim, obs, quantile = 0.99, metrics = STORM_METRICS) # we use a
153154
pd.DataFrame(dict(general, **storm), index=['abed'])
154155
```
155156

156-
| | bias | rmse | rms | rms_95 | sim_mean | obs_mean | sim_std | obs_std | nse | lamba | cr | cr_95 | slope | intercept | slope_pp | intercept_pp | mad | madp | madc | kge | R1 | R1_norm | R3 | R3_norm | error | error_norm |
157+
| | bias | rmse | rms | rms_95 | sim_mean | obs_mean | sim_std | obs_std | nse | lambda | cr | cr_95 | slope | intercept | slope_pp | intercept_pp | mad | madp | madc | kge | R1 | R1_norm | R3 | R3_norm | error | error_norm |
157158
|:-----|-------:|-------:|------:|---------:|-----------:|-----------:|----------:|----------:|------:|--------:|------:|--------:|--------:|------------:|-----------:|---------------:|------:|-------:|-------:|------:|---------:|----------:|---------:|----------:|----------:|-------------:|
158159
| abed | -0.007 | 0.086 | 0.086 | 0.088 | -0 | 0.007 | 0.142 | 0.144 | 0.677 | 0.929 | 0.817 | 0.542 | 0.718 | -0.005 | 1.401 | -0.028 | 0.052 | 0.213 | 0.265 | 0.81 | 0.237364 | 0.295719 | 0.147163 | 0.207019 | 0.0938142 | 0.177533 |

notebooks/example_abed.ipynb

Lines changed: 23 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
"source": [
99
"import pandas as pd\n",
1010
"import holoviews as hv\n",
11-
"import hvplot.pandas\n",
11+
"import hvplot.pandas # noqa: F401\n",
1212
"\n",
1313
"from seastats import get_stats"
1414
]
@@ -26,8 +26,8 @@
2626
"metadata": {},
2727
"outputs": [],
2828
"source": [
29-
"sim = pd.read_parquet('../tests/data/abed_sim.parquet')\n",
30-
"obs = pd.read_parquet('../tests/data/abed_obs.parquet')"
29+
"sim = pd.read_parquet(\"../tests/data/abed_sim.parquet\")\n",
30+
"obs = pd.read_parquet(\"../tests/data/abed_obs.parquet\")"
3131
]
3232
},
3333
{
@@ -47,7 +47,7 @@
4747
"metadata": {},
4848
"outputs": [],
4949
"source": [
50-
"obs.hvplot(line_dash = \"dashed\") * sim.hvplot()"
50+
"obs.hvplot(line_dash=\"dashed\") * sim.hvplot()"
5151
]
5252
},
5353
{
@@ -67,6 +67,7 @@
6767
"outputs": [],
6868
"source": [
6969
"from seastats import GENERAL_METRICS, STORM_METRICS\n",
70+
"\n",
7071
"GENERAL_METRICS\n",
7172
"STORM_METRICS"
7273
]
@@ -78,7 +79,7 @@
7879
"outputs": [],
7980
"source": [
8081
"stats = get_stats(obs, sim)\n",
81-
"pd.DataFrame(stats, index = ['abed'])"
82+
"pd.DataFrame(stats, index=[\"abed\"])"
8283
]
8384
},
8485
{
@@ -95,6 +96,7 @@
9596
"outputs": [],
9697
"source": [
9798
"from seastats import GENERAL_METRICS_ALL, STORM_METRICS_ALL\n",
99+
"\n",
98100
"GENERAL_METRICS_ALL\n",
99101
"STORM_METRICS_ALL"
100102
]
@@ -149,7 +151,7 @@
149151
"source": [
150152
"threshold = sim.quantile(quantile)\n",
151153
"ext_ = get_extremes(sim, \"POT\", threshold=threshold, r=f\"{cluster_duration}h\")\n",
152-
"extremes1 = pd.DataFrame({\"modeled\" : ext_, \"time_model\" : ext_.index}, index=ext_.index)\n",
154+
"extremes1 = pd.DataFrame({\"modeled\": ext_, \"time_model\": ext_.index}, index=ext_.index)\n",
153155
"ext_"
154156
]
155157
},
@@ -178,7 +180,7 @@
178180
"source": [
179181
"threshold = obs.quantile(quantile)\n",
180182
"ext_ = get_extremes(obs, \"POT\", threshold=threshold, r=f\"{cluster_duration}h\")\n",
181-
"extremes2 = pd.DataFrame({\"modeled\" : ext_, \"time_model\" : ext_.index}, index=ext_.index)\n",
183+
"extremes2 = pd.DataFrame({\"modeled\": ext_, \"time_model\": ext_.index}, index=ext_.index)\n",
182184
"ext_"
183185
]
184186
},
@@ -188,7 +190,9 @@
188190
"metadata": {},
189191
"outputs": [],
190192
"source": [
191-
"obs_plot = obs.hvplot(line_dash = \"dashed\") * hv.Scatter((extremes2.index, extremes2.modeled)).opts(color=\"red\")\n",
193+
"obs_plot = obs.hvplot(line_dash=\"dashed\") * hv.Scatter((extremes2.index, extremes2.modeled)).opts(\n",
194+
" color=\"red\",\n",
195+
")\n",
192196
"obs_plot"
193197
]
194198
},
@@ -206,7 +210,8 @@
206210
"outputs": [],
207211
"source": [
208212
"from seastats.storms import match_extremes\n",
209-
"extremes_df = match_extremes(sim, obs, 0.99, cluster = 72)\n",
213+
"\n",
214+
"extremes_df = match_extremes(sim, obs, 0.99, cluster=72)\n",
210215
"extremes_df"
211216
]
212217
},
@@ -217,10 +222,14 @@
217222
"outputs": [],
218223
"source": [
219224
"hv.Points(\n",
220-
" extremes_df[['tdiff','diff']].rename(columns={'tdiff':'time difference (hours)','diff':'peak difference (m)'}), \n",
221-
" kdims=['time difference (hours)','peak difference (m)']).opts(\n",
222-
" size = 8, tools = ['hover']\n",
223-
" )"
225+
" extremes_df[[\"tdiff\", \"diff\"]].rename(\n",
226+
" columns={\"tdiff\": \"time difference (hours)\", \"diff\": \"peak difference (m)\"},\n",
227+
" ),\n",
228+
" kdims=[\"time difference (hours)\", \"peak difference (m)\"],\n",
229+
").opts(\n",
230+
" size=8,\n",
231+
" tools=[\"hover\"],\n",
232+
")"
224233
]
225234
},
226235
{
@@ -244,7 +253,7 @@
244253
"outputs": [],
245254
"source": [
246255
"metrics = get_stats(sim, obs, quantile=0.99, cluster=72)\n",
247-
"pd.DataFrame(dict(stats, **metrics), index=['abed'])"
256+
"pd.DataFrame(dict(stats, **metrics), index=[\"abed\"])"
248257
]
249258
}
250259
],

0 commit comments

Comments (0)