| 1 | """
|
|---|
| 2 | Aggregation methods for space time raster datasets
|
|---|
| 3 |
|
|---|
| 4 | Usage:
|
|---|
| 5 |
|
|---|
| 6 | .. code-block:: python
|
|---|
| 7 |
|
|---|
| 8 | import grass.temporal as tgis
|
|---|
| 9 |
|
|---|
| 10 | tgis.aggregate_raster_maps(dataset, mapset, inputs, base, start, end, count, method, register_null, dbif)
|
|---|
| 11 |
|
|---|
| 12 | (C) 2012-2013 by the GRASS Development Team
|
|---|
| 13 | This program is free software under the GNU General Public
|
|---|
| 14 | License (>=v2). Read the file COPYING that comes with GRASS
|
|---|
| 15 | for details.
|
|---|
| 16 |
|
|---|
| 17 | :author: Soeren Gebbert
|
|---|
| 18 | """
|
|---|
| 19 |
|
|---|
| 20 | import grass.script as gscript
|
|---|
| 21 | from grass.exceptions import CalledModuleError
|
|---|
| 22 | from .space_time_datasets import RasterDataset
|
|---|
| 23 | from .datetime_math import create_suffix_from_datetime
|
|---|
| 24 | from .datetime_math import create_time_suffix
|
|---|
| 25 | from .datetime_math import create_numeric_suffix
|
|---|
| 26 | from .core import get_current_mapset, get_tgis_message_interface, init_dbif
|
|---|
| 27 | from .spatio_temporal_relationships import SpatioTemporalTopologyBuilder, \
|
|---|
| 28 | create_temporal_relation_sql_where_statement
|
|---|
| 29 | ###############################################################################
|
|---|
| 30 |
|
|---|
| 31 |
|
|---|
| 32 | def collect_map_names(sp, dbif, start, end, sampling):
|
|---|
| 33 | """Gather all maps from dataset using a specific sample method
|
|---|
| 34 |
|
|---|
| 35 | :param sp: The space time raster dataset to select aps from
|
|---|
| 36 | :param dbif: The temporal database interface to use
|
|---|
| 37 | :param start: The start time of the sample interval, may be relative or
|
|---|
| 38 | absolute
|
|---|
| 39 | :param end: The end time of the sample interval, may be relative or
|
|---|
| 40 | absolute
|
|---|
| 41 | :param sampling: The sampling methods to use
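
    A minimal usage sketch; the dataset name, the interval boundaries and
    the surrounding helper calls are illustrative assumptions:

    .. code-block:: python

        from datetime import datetime
        import grass.temporal as tgis

        tgis.init()
        dbif, connected = tgis.init_dbif(None)
        strds = tgis.open_old_stds("temperature@climate", "strds", dbif)
        names = tgis.collect_map_names(strds, dbif,
                                       datetime(2010, 1, 1),
                                       datetime(2010, 2, 1), "start")
        if connected:
            dbif.close()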
    """

    use_start = False
    use_during = False
    use_overlap = False
    use_contain = False
    use_equal = False
    use_follows = False
    use_precedes = False

    # Initialize the methods
    if sampling:
        for name in sampling.split(","):
            if name == "start":
                use_start = True
            if name == "during":
                use_during = True
            if name == "overlap":
                use_overlap = True
            if name == "contain":
                use_contain = True
            if name == "equal":
                use_equal = True
            if name == "follows":
                use_follows = True
            if name == "precedes":
                use_precedes = True
    else:
        use_start = True

    if sp.get_map_time() != "interval":
        use_start = True
        use_during = False
        use_overlap = False
        use_contain = False
        use_equal = False
        use_follows = False
        use_precedes = False

    where = create_temporal_relation_sql_where_statement(start, end,
                                                         use_start,
                                                         use_during,
                                                         use_overlap,
                                                         use_contain,
                                                         use_equal,
                                                         use_follows,
                                                         use_precedes)

    rows = sp.get_registered_maps("id", where, "start_time", dbif)

    if not rows:
        return None

    names = []
    for row in rows:
        names.append(row["id"])

    return names

###############################################################################


def aggregate_raster_maps(inputs, base, start, end, count, method,
                          register_null, dbif, offset=0):
    """Aggregate a list of raster input maps with r.series

    :param inputs: The names of the raster maps to be aggregated
    :param base: The basename of the newly created raster maps
    :param start: The start time of the sample interval, may be relative or
                  absolute
    :param end: The end time of the sample interval, may be relative or
                absolute
    :param count: The number to be attached to the basename of the newly
                  created raster map
    :param method: The aggregation method to be used by r.series
    :param register_null: If True, null maps will be registered in the space
                          time raster dataset; if False, they will not
    :param dbif: The temporal database interface to use
    :param offset: Offset to be added to the map counter to create the map ids
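
    A minimal usage sketch; the map names, basename and interval are
    illustrative assumptions, and the input raster maps must already exist:

    .. code-block:: python

        from datetime import datetime
        import grass.temporal as tgis

        tgis.init()
        dbif, connected = tgis.init_dbif(None)
        new_map = tgis.aggregate_raster_maps(["prec_1", "prec_2", "prec_3"],
                                             "prec_sum", datetime(2010, 1, 1),
                                             datetime(2010, 2, 1), 1, "sum",
                                             False, dbif)
        if connected:
            dbif.close()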
    """

    msgr = get_tgis_message_interface()

    msgr.verbose(_("Aggregating %s raster maps") % (len(inputs)))
    output = "%s_%i" % (base, int(offset) + count)

    mapset = get_current_mapset()
    map_id = output + "@" + mapset
    new_map = RasterDataset(map_id)

    # Check if new map is in the temporal database
    if new_map.is_in_db(dbif):
        if gscript.overwrite() is True:
            # Remove the existing temporal database entry
            new_map.delete(dbif)
            new_map = RasterDataset(map_id)
        else:
            msgr.error(_("Raster map <%(name)s> is already in temporal "
                         "database, use overwrite flag to overwrite" %
                         ({"name": new_map.get_name()})))
            return

    msgr.verbose(_("Computing aggregation of maps between %(st)s - %(end)s" % {
                 'st': str(start), 'end': str(end)}))

    # Create the r.series input file
    filename = gscript.tempfile(True)
    file = open(filename, 'w')

    for name in inputs:
        string = "%s\n" % (name)
        file.write(string)

    file.close()

    # Run r.series
    try:
        if len(inputs) > 1000:
            gscript.run_command("r.series", flags="z", file=filename,
                                output=output, overwrite=gscript.overwrite(),
                                method=method)
        else:
            gscript.run_command("r.series", file=filename,
                                output=output, overwrite=gscript.overwrite(),
                                method=method)
    except CalledModuleError:
        dbif.close()
        msgr.fatal(_("Error occurred in r.series computation"))

    # Read the raster map data
    new_map.load()

    # In case of a null map, do not register it unless requested
    if new_map.metadata.get_min() is None and \
       new_map.metadata.get_max() is None:
        if not register_null:
            gscript.run_command("g.remove", flags='f', type='raster',
                                name=output)
            return None

    return new_map

##############################################################################


def aggregate_by_topology(granularity_list, granularity, map_list, topo_list,
                          basename, time_suffix, offset=0, method="average",
                          nprocs=1, spatial=None, dbif=None, overwrite=False,
                          file_limit=1000):
    """Aggregate a list of raster input maps with r.series

    :param granularity_list: A list of AbstractMapDataset objects.
                             The temporal extents of the objects are used
                             to build the spatio-temporal topology with the
                             map list objects
    :param granularity: The granularity of the granularity list
    :param map_list: A list of RasterDataset objects that contain the raster
                     maps that should be aggregated
    :param topo_list: A list of strings of topological relations that are
                      used to select the raster maps for aggregation
    :param basename: The basename of the newly generated raster maps
    :param time_suffix: Use the granularity truncated start time of the
                        actual granule to create the suffix for the basename
    :param offset: Use a numerical offset for suffix generation
                   (overwritten by time_suffix)
    :param method: The aggregation method of r.series (average, min, max, ...)
    :param nprocs: The number of processes used for parallel computation
    :param spatial: This indicates if the spatial topology is created as
                    well: spatial can be None (no spatial topology), "2D"
                    using west, east, south, north or "3D" using west,
                    east, south, north, bottom, top
    :param dbif: The database interface to be used
    :param overwrite: Overwrite existing raster maps
    :param file_limit: The maximum number of raster map layers that
                       should be opened at once by r.series
    :return: A list of RasterDataset objects that contain the new map names
             and the temporal extent for map registration
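
    A minimal usage sketch; the map names, the granule construction and the
    granularity string are illustrative assumptions (callers such as
    t.rast.aggregate prepare these lists from an existing space time raster
    dataset):

    .. code-block:: python

        from datetime import datetime
        import grass.temporal as tgis

        tgis.init()

        # Daily input maps, assumed to exist as raster maps in PERMANENT
        map_list = []
        for day in (1, 2, 3):
            rmap = tgis.RasterDataset("prec_%i@PERMANENT" % day)
            rmap.set_absolute_time(datetime(2010, 1, day),
                                   datetime(2010, 1, day + 1))
            map_list.append(rmap)

        # A single granule spanning the whole period
        granule = tgis.RasterDataset("granule@PERMANENT")
        granule.set_absolute_time(datetime(2010, 1, 1), datetime(2010, 1, 4))

        output_list = tgis.aggregate_by_topology(granularity_list=[granule],
                                                 granularity="3 days",
                                                 map_list=map_list,
                                                 topo_list=["contains"],
                                                 basename="prec_sum",
                                                 time_suffix="gran",
                                                 method="sum", nprocs=1,
                                                 overwrite=True)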
    """
    import grass.pygrass.modules as pymod
    import copy

    msgr = get_tgis_message_interface()

    dbif, connected = init_dbif(dbif)

    topo_builder = SpatioTemporalTopologyBuilder()
    topo_builder.build(mapsA=granularity_list, mapsB=map_list, spatial=spatial)

    # The module queue for parallel execution
    process_queue = pymod.ParallelModuleQueue(int(nprocs))

    # Dummy process object that will be deep copied
    # and be put into the process queue
    r_series = pymod.Module("r.series", output="spam", method=[method],
                            overwrite=overwrite, quiet=True, run_=False,
                            finish_=False)
    g_copy = pymod.Module("g.copy", raster=['spam', 'spamspam'],
                          quiet=True, run_=False, finish_=False)
    output_list = []
    count = 0

    for granule in granularity_list:
        msgr.percent(count, len(granularity_list), 1)
        count += 1

        aggregation_list = []

        if "equal" in topo_list and granule.equal:
            for map_layer in granule.equal:
                aggregation_list.append(map_layer.get_name())
        if "contains" in topo_list and granule.contains:
            for map_layer in granule.contains:
                aggregation_list.append(map_layer.get_name())
        if "during" in topo_list and granule.during:
            for map_layer in granule.during:
                aggregation_list.append(map_layer.get_name())
        if "starts" in topo_list and granule.starts:
            for map_layer in granule.starts:
                aggregation_list.append(map_layer.get_name())
        if "started" in topo_list and granule.started:
            for map_layer in granule.started:
                aggregation_list.append(map_layer.get_name())
        if "finishes" in topo_list and granule.finishes:
            for map_layer in granule.finishes:
                aggregation_list.append(map_layer.get_name())
        if "finished" in topo_list and granule.finished:
            for map_layer in granule.finished:
                aggregation_list.append(map_layer.get_name())
        if "overlaps" in topo_list and granule.overlaps:
            for map_layer in granule.overlaps:
                aggregation_list.append(map_layer.get_name())
        if "overlapped" in topo_list and granule.overlapped:
            for map_layer in granule.overlapped:
                aggregation_list.append(map_layer.get_name())

        if aggregation_list:
            msgr.verbose(_("Aggregating %(len)i raster maps from %(start)s to"
                           " %(end)s") % ({"len": len(aggregation_list),
                           "start": str(granule.temporal_extent.get_start_time()),
                           "end": str(granule.temporal_extent.get_end_time())}))

            if granule.is_time_absolute() is True and time_suffix == 'gran':
                suffix = create_suffix_from_datetime(
                    granule.temporal_extent.get_start_time(), granularity)
                output_name = "{ba}_{su}".format(ba=basename, su=suffix)
            elif granule.is_time_absolute() is True and time_suffix == 'time':
                suffix = create_time_suffix(granule)
                output_name = "{ba}_{su}".format(ba=basename, su=suffix)
            else:
                output_name = create_numeric_suffix(basename,
                                                    count + int(offset),
                                                    time_suffix)

            map_layer = RasterDataset("%s@%s" % (output_name,
                                                 get_current_mapset()))
            map_layer.set_temporal_extent(granule.get_temporal_extent())

            if map_layer.map_exists() is True and overwrite is False:
                msgr.fatal(_("Unable to perform aggregation. Output raster "
                             "map <%(name)s> exists and overwrite flag was "
                             "not set" % ({"name": output_name})))

            output_list.append(map_layer)

            if len(aggregation_list) > 1:
                # Create the r.series input file
                filename = gscript.tempfile(True)
                file = open(filename, 'w')
                for name in aggregation_list:
                    string = "%s\n" % (name)
                    file.write(string)
                file.close()

                mod = copy.deepcopy(r_series)
                mod(file=filename, output=output_name)
                if len(aggregation_list) > int(file_limit):
                    msgr.warning(_("The number of input maps (%i) exceeds "
                                   "the open file limit (%i). The module "
                                   "r.series will be run with the -z flag "
                                   "to avoid exceeding the open files "
                                   "limit." % (len(aggregation_list),
                                               int(file_limit))))
                    mod(flags="z")
                process_queue.put(mod)
            else:
                mod = copy.deepcopy(g_copy)
                mod(raster=[aggregation_list[0], output_name])
                process_queue.put(mod)

    process_queue.wait()

    if connected:
        dbif.close()

    msgr.percent(1, 1, 1)

    return output_list