|
51 | 51 | "quantile",
|
52 | 52 | "scale",
|
53 | 53 | "sum",
|
| 54 | + "change_datatype", |
54 | 55 | ]
|
55 | 56 |
|
56 | 57 |
|
@@ -94,14 +95,19 @@ def _clone(
|
94 | 95 | start_time: datetime | None = None,
|
95 | 96 | timestep: float | None = None,
|
96 | 97 | items: Sequence[int | DfsDynamicItemInfo] | None = None,
|
| 98 | + datatype: int = None, |
97 | 99 | ) -> DfsFile:
|
98 | 100 | source = DfsFileFactory.DfsGenericOpen(str(infilename))
|
99 | 101 | fi = source.FileInfo
|
100 | 102 |
|
101 | 103 | builder = DfsBuilder.Create(fi.FileTitle, "mikeio", __dfs_version__)
|
102 | 104 |
|
103 | 105 | # Set up the header
|
104 |
| - builder.SetDataType(fi.DataType) |
| 106 | + if datatype is None: |
| 107 | + builder.SetDataType(fi.DataType) |
| 108 | + else: |
| 109 | + builder.SetDataType(datatype) |
| 110 | + |
105 | 111 | builder.SetGeographicalProjection(fi.Projection)
|
106 | 112 |
|
107 | 113 | # Copy time axis
|
@@ -959,3 +965,50 @@ def _get_repeated_items(
|
959 | 965 | new_items.append(item)
|
960 | 966 |
|
961 | 967 | return new_items
|
| 968 | + |
| 969 | + |
def change_datatype(
    infilename: str | pathlib.Path,
    outfilename: str | pathlib.Path,
    datatype: int,
) -> None:
    """Copy a dfs file, writing the output with a different DataType.

    All items, time steps and data values are copied unchanged; only the
    file-level DataType attribute of the output file is replaced.

    Parameters
    ----------
    infilename : str | pathlib.Path
        input filename
    outfilename : str | pathlib.Path
        output filename
    datatype : int
        DataType to be used for the output file

    Examples
    --------
    >>> change_datatype("in.dfsu", "out.dfsu", datatype=107)

    """
    # _clone copies the header/items but stamps the requested DataType
    dfs_out = _clone(infilename, outfilename, datatype=datatype)
    # DfsGenericOpen expects a str path (cf. _clone) — convert explicitly
    # so pathlib.Path arguments work as the signature promises.
    dfs_in = DfsFileFactory.DfsGenericOpen(str(infilename))

    item_numbers = _valid_item_numbers(dfs_in.ItemInfo)
    n_time_steps = dfs_in.FileInfo.TimeAxis.NumberOfTimeSteps
    deletevalue = dfs_in.FileInfo.DeleteValueFloat

    # Rewrite the data verbatim. FIXME: can we do this in a more elegant way?
    for timestep in trange(n_time_steps, disable=True):
        for item_number in item_numbers:
            itemdata = dfs_in.ReadItemTimeStep(item_number + 1, timestep)
            d = itemdata.Data
            # Normalize any stray NaNs to the file's delete value; values
            # already equal to deletevalue pass through unchanged.
            d[np.isnan(d)] = deletevalue
            darray = d.astype(np.float32)
            dfs_out.WriteItemTimeStep(item_number + 1, timestep, itemdata.Time, darray)

    dfs_out.Close()
    dfs_in.Close()
0 commit comments