# -*- coding: utf-8 -*-
"""IA detection module.

This module has the function called by /AI/skynet/detection
This executes AI detection with the given model to detect the given entities.
"""
import os
import tempfile
import typing
import datetime
import distutils.util  # NOTE(review): distutils is removed in Python 3.12 (PEP 632) -- plan a replacement for strtobool

import termcolor

# Fix gdal 2.40 and 3.3 integration problems
try:
    import gdal
    import ogr
except ModuleNotFoundError:
    from osgeo import gdal, ogr

from django.conf import settings

# Fix of python 3.7 upgrade, OTB not working with this version
try:
    import otbApplication
except (ModuleNotFoundError, ImportError):
    pass

from tools import const
from tools.drivers.driver import Drivers
from tools.colException import ColException
from tools.layer import Layer, OutLayer
from GMS.tools.gdal_python import is_overlapping, square_grid_polygon
from MG.tools.postgis_api import PostGis

# Custom errors ###################################################################################
# Raised when the AOI tiling produces nothing to process (empty grid / no zoom levels).
NoTilesError = type('NoTilesError', (ColException, ), {})

# SQL SENTENCES ####################################################################################
# Template for a libpq-style connection string; filled per-call with the PostGis object's fields.
POSTGRES_CONNECTION_STRING = "dbname={postgis_dbname} host={postgis_host} port={postgis_port} " \
                             "user={postgis_user} password={postgis_password} " \
                             "schemas={postgis_schemas}"

# Crop a raster to a cutline shape (used to cut orto/lidar/ms rasters to each tile).
GDAL_WARP_SENTENCE = 'gdalwarp -q -multi -cutline "{cut_shape}" -dstnodata 0 -overwrite ' \
                     '-crop_to_cutline ' \
                     '-r near "{source}" "{dst}" '

# GiST spatial index on a table's geometry column.
INDEXING_TABLE = 'CREATE INDEX \"{index_name}\" ON \"{schema}\".\"{table}\" USING ' \
                 'gist({geometry_field})'

# Append one OTB segmentation table into the master OTB table, drawing a fresh ogc_fid
# from the master table's sequence.
MERGE_TABLE = "INSERT INTO \"{schema}\".\"{master_table}\" (SELECT (select * from nextval" \
              "('\"{schema}\".\"{master_table}_ogc_fid_seq\"')), label, \"nbPixels\", \"meanB0\"," \
              " \"meanB1\", \"meanB2\", \"varB0\", \"varB1\", \"varB2\", \"wkb_geometry\" " \
              "FROM \"{schema}\".\"{table_in}\");"

# Copy a table while repairing geometries: ST_MakeValid + 0-width buffer removes
# self-intersections without changing the shape.
FIXED = "CREATE TABLE \"{schema}\".\"{output}\" AS SELECT ogc_fid, label, \"nbPixels\"," \
        " \"meanB0\", \"meanB1\", \"meanB2\", \"varB0\", \"varB1\", \"varB2\", " \
        "ST_Buffer(ST_MakeValid(\"wkb_geometry\"), 0.0) as \"wkb_geometry\" " \
        "FROM \"{schema}\".\"{input}\";"

# NOTE(review): '-ts' takes a target size in PIXELS; a resolution of 0.25 x -0.25 would be
# '-tr'. This template is not referenced anywhere in this module -- confirm before fixing
# or deleting it.
GDAL_WARP_SENTENCE_2 = 'gdalwarp -co "COMPRESS=NONE" -q -multi -cutline "{cut_shape}" ' \
                       '-dstnodata 0 -overwrite -crop_to_cutline -r near ' \
                       '-ts 0.25 -0.25 "{source}" "{dst}" '

# NOTE(review): FIRST_CLEAN, ADD_SERIAL, SET_SRID, SIZE_CLEANING and GROUP_BY are not used
# in this module; presumably kept for other callers or future cleanup steps -- verify.
FIRST_CLEAN = "CREATE TABLE \"{schema}\".\"{table_new}\" " \
              "AS select ST_MakeValid((ST_Dump(wkb_geometry)).geom) as geom, " \
              "ST_Area(wkb_geometry) as area from \"{schema}\".\"{table_old}\" WHERE " \
              "ST_Area(wkb_geometry) > POWER({GSD}*{num_pixels},2);"

ADD_SERIAL = "Alter table \"{schema}\".\"{table}\" add column id serial;"

SET_SRID = "ALTER TABLE \"{schema}\".\"{table}\" " \
           "ALTER COLUMN {geom_field} TYPE geometry(POLYGON, {srid}) " \
           "USING ST_Transform({geom_field}, {srid});"

SIZE_CLEANING = "CREATE TABLE \"{schema}\".\"{dst_table}\" AS select * " \
                "from \"{schema}\".\"{src_table}\" " \
                "WHERE ST_Area(\"{geometry_field}\") > " \
                "{pixels_size}*POWER({GSD},2);"

GROUP_BY = "CREATE TABLE \"{schema}\".\"{dst_table}\" AS SELECT \"{field_by}\", " \
           "ST_MakeValid((ST_Dump(ST_UNION(\"{geometry_field}\"))).geom) as \"{geometry_field}\"" \
           "from \"{schema}\".\"{src_table}\" GROUP BY \"{field_by}\""

# Extract the rows of one detected entity from the master detections table.
SELECT_BY = "CREATE TABLE \"{schema}\".\"{dst_table}\" AS SELECT * FROM \"{schema}\".\"{src_table}\"" \
            " WHERE \"{field}\"={field_value}"

# Reproject any raster to EPSG:4326 so AOI, orto, lidar and ms all share one SRS.
CHANGE_TO_4326 = "gdalwarp -t_srs \"EPSG:4326\" {srcfile} {dstfile}"

# PL/pgSQL helper installed on every run: merges detections smaller than `area_min`
# into the neighbouring large detection they touch/fall inside, writing `vw_salida`.
# NOTE(review): the original literal's internal line breaks were lost; the SQL below is
# re-wrapped at statement boundaries (whitespace-insignificant for PostgreSQL).
SET_CLEAN_FUNCTION = """CREATE OR REPLACE FUNCTION funciones.ia_limpieza(
    esch text,
    vw_salida text,
    tabla_entrada text,
    area_min text,
    id_field text,
    geom_field text)
    RETURNS void
    LANGUAGE 'plpgsql'
    COST 100
    VOLATILE
AS $BODY$
DECLARE
geome1 character varying;
begin
EXECUTE 'alter table "'|| esch ||'"."'|| tabla_entrada ||'" rename to __espac1';
SELECT f_geometry_column into geome1 FROM geometry_columns WHERE f_table_name = '__espac1';
EXECUTE 'alter table "'|| esch ||'".__espac1 rename to "'|| tabla_entrada ||'"';
EXECUTE 'drop table if exists "'|| esch ||'".areas_grandes;';
EXECUTE 'create table "'|| esch ||'".areas_grandes as select * from (select "'|| id_field ||'",ROUND(st_area ('|| geome1 ||'::Geography)::numeric,3),entity_index,score,'|| geome1 ||' from "'|| esch ||'"."'|| tabla_entrada||'")a where ROUND::numeric > '''|| area_min ||''';';
EXECUTE 'CREATE INDEX " '|| esch ||'_areas_grandes_geom" ON "'|| esch ||'".areas_grandes USING GIST ("'|| geom_field ||'");';
EXECUTE 'drop table if exists "'|| esch ||'".areas_peque;';
EXECUTE 'create table "'|| esch ||'".areas_peque as select * from (select "'|| id_field ||'",ROUND(st_area ("'|| geom_field ||'"::Geography)::numeric,3),entity_index,score,st_centroid("'|| geom_field ||'") as geoma,"'|| geom_field ||'" from "'|| esch ||'"."'|| tabla_entrada||'")a where ROUND::numeric <= '''|| area_min ||''';';
EXECUTE 'CREATE INDEX "'|| esch ||'_areas_peque_geom" ON "'|| esch ||'".areas_peque USING GIST ("'|| geom_field ||'");';
EXECUTE 'drop table if exists "'|| esch ||'".areas_peque_id;';
EXECUTE 'create table "'|| esch ||'".areas_peque_id as select a."'|| id_field ||'",b."'|| geom_field ||'" from "'|| esch ||'".areas_grandes a, "'|| esch ||'".areas_peque b where st_contains (a."'|| geom_field ||'",b.geoma);';
EXECUTE 'CREATE INDEX "'|| esch ||'_areas_peque_id_geom" ON "'|| esch ||'".areas_peque_id USING GIST ("'|| geom_field ||'");';
EXECUTE 'drop table if exists "'|| esch ||'".areas_peque_no;';
EXECUTE 'create table "'|| esch ||'".areas_peque_no as select a."'|| geom_field ||'" from (select a."'|| geom_field ||'",b."'|| id_field ||'" from "'|| esch ||'".areas_peque a left join "'|| esch ||'".areas_peque_id b on st_intersects (a.geoma,b."'|| geom_field ||'"))a where "'|| id_field ||'" is null;';
EXECUTE 'CREATE INDEX "'|| esch ||'_areas_peque_no_geom" ON "'|| esch ||'".areas_peque_no USING GIST ("'|| geom_field ||'");';
EXECUTE 'drop table if exists "'|| esch ||'".areas_peque_no_id;';
EXECUTE 'create table "'|| esch ||'".areas_peque_no_id as select a."'|| id_field ||'",b."'|| geom_field ||'" from "'|| esch ||'".areas_grandes a, "'|| esch ||'".areas_peque_no b where st_touches (a."'|| geom_field ||'",b."'|| geom_field ||'");';
EXECUTE 'CREATE INDEX "'|| esch ||'_areas_peque_no_id_geom" ON "'|| esch ||'".areas_peque_no_id USING GIST ("'|| geom_field ||'");';
EXECUTE 'drop table if exists "'|| esch ||'".ia_fin;';
EXECUTE 'create table "'|| esch ||'".ia_fin as select "'|| id_field ||'" ,st_union("'|| geom_field ||'") as "'|| geom_field ||'" from (select "'|| id_field ||'","'|| geom_field ||'" from "'|| esch ||'".areas_peque_id union all select "'|| id_field ||'","'|| geom_field ||'" from "'|| esch ||'".areas_grandes union all select "'|| id_field ||'","'|| geom_field ||'" from "'|| esch ||'".areas_peque_no_id)a group by "'|| id_field ||'";';
EXECUTE 'drop table if exists "'|| esch ||'"."'||vw_salida||'";';
EXECUTE 'create table "'|| esch ||'"."'||vw_salida||'" as select a.*,b."'|| geom_field ||'" as geoma from "'|| esch ||'".ia_fin b, "'|| esch ||'"."'|| tabla_entrada||'" a where a."'|| id_field ||'" =b."'|| id_field ||'";';
EXECUTE 'alter table "'|| esch ||'"."'||vw_salida||'" drop column '|| geome1 ||';';
EXECUTE 'alter table "'|| esch ||'"."'||vw_salida||'" rename geoma to "'|| geom_field ||'";';
EXECUTE 'drop table if exists "'|| esch ||'".ia_fin;';
EXECUTE 'drop table if exists "'|| esch ||'".areas_grandes;';
EXECUTE 'drop table if exists "'|| esch ||'".areas_peque_no;';
EXECUTE 'drop table if exists "'|| esch ||'".areas_peque_id;';
EXECUTE 'drop table if exists "'|| esch ||'".areas_peque;';
EXECUTE 'drop table if exists "'|| esch ||'".areas_peque_no_id;';
end;
$BODY$;"""


# ##################################################################################################
def post(task, user: str, aoi: Layer, orthophoto: Layer, output_layer: OutLayer,
         indexes: typing.List, model: str, lidar: Layer = Layer.optional(),
         ms: Layer = Layer.optional(), confident: float = 0.9,
         is_optimized: str = 'True') -> OutLayer:
    """Detect new cartographic entities by orthophoto, rasterized lidar
    or multisensing images

    Parameters
    ----------
    user: str
        User ID.
    aoi: FTP Connection
        Connection to aoi source (KML).
    orthophoto: FTP Connection
        Connection to ortophoto source (dir or file).
    lidar: FTP Connection
        Connection to dem raster source.
    ms: FTP Connection
        Connection to merged sentinel images (all bands except 10, Sentinel).
    output_layer: FTP Connection
        Connection to save result.
    indexes: list
        List of entities to detect.
    model: str
        Name of the model to use.
    confident: float
        Confident of detected entities.

    Returns
    -------
    output_layer: Connection
        Connection where result is saved. If there are more than one indexes
        to detect the results, connection will be a ZIP connection. Inside ZIP
        there are one file per index detected named as
        {source_name}__{user}__{timestamp}

    Notes
    -----
    - User id must by lowercase.
    - Indexes available: [edificios, piscinas]
    - Coming soon: carreteras dobles, carreteras únicas, torres de tendido,
      vías ciclistas, vías de ferrocarril ...

    Examples
    --------
    >>> import requests
    >>> aoi = {"ip": "192.168.1.214",
    ...        "port": 8001,
    ...        "protocol": 'FTP',
    ...        "user": "test",
    ...        "password": "test",
    ...        "layer_name": "AOI_little",
    ...        "driver_type": "KML",
    ...        "source": "/AOI_little.kml"}
    >>> orthophoto = {"ip": "192.168.1.214",
    ...               "port": 8001,
    ...               "protocol": 'FTP',
    ...               "user": "test",
    ...               "password": "test",
    ...               "layer_name": "",
    ...               "driver_type": "ECW",
    ...               "source": "/ORTO"}
    >>> lidar = {"ip": "192.168.1.214",
    ...          "port": 8001,
    ...          "protocol": 'FTP',
    ...          "user": "test",
    ...          "password": "test",
    ...          "layer_name": "",
    ...          "driver_type": "GeoTIFF",
    ...          "source": "/PointCloud"}
    >>> ms = {"ip": "192.168.1.214",
    ...       "port": 8001,
    ...       "protocol": 'FTP',
    ...       "user": "test",
    ...       "password": "test",
    ...       "layer_name": "",
    ...       "driver_type": "GeoTIFF",
    ...       "source": "/MS"}
    >>> parameters={'user':'test', "aoi":aoi, "orthophoto":orthophoto,
    ...             "lidar":lidar, "ms":ms,
    ...             'output_layer':output_layer,'indexes':['edificios', 'piscinas']}
    >>> response = requests.post(':/AI/skyNet/new_entities',
    ...                          json=parameters)
    >>> response.json()
    {'task_id': 'XoiRl9'}
    """
    now = datetime.datetime.now()  # current date and time
    # Timestamp used to suffix every per-entity result table name.
    timestamp = now.strftime("%Y%m%dT%H%M%S")
    task.status_description = 'Checking connections'
    orto = orthophoto
    # Coerce the string flag to bool. strtobool raises ValueError on unknown
    # input, so unrecognised strings are mapped to False instead of crashing.
    if is_optimized.lower() in ['y', 'yes', 't', 'true', 'on', '1',
                                'n', 'no', 'f', 'false', 'off', '0']:
        is_optimized = distutils.util.strtobool(is_optimized.lower())
    else:
        is_optimized = False

    task.status_description = "Loading AI Model: {}".format(model)
    task.set_progress(5)
    # # Loading IA model
    env_conf = settings.ENV_CONF
    ai_model = env_conf['nets'][model]
    ai_model.load(task.task_id)
    indexes_map = ai_model.indexes_map
    # Build {entity_index(str): {"type": ..., "name": ...}} either from the
    # model's full map ('all') or only for the indexes requested by the caller.
    if indexes[0] == 'all':
        indexes_dict = {str(index['index']): {"type": index['type'],
                                              "name": list(index.keys())[0]}
                        for index in indexes_map['all']}
    else:
        indexes_dict = {str(indexes_map[indexes_string]['index']):
                            {"type": indexes_map[indexes_string]['type'],
                             "name": indexes_string}
                        for indexes_string in indexes}

    # Get feature of AOI layer
    aoi_ds = ogr.Open(aoi.gdal_layer())
    aoi_layer = aoi_ds.GetLayer()
    aoi_crs = aoi_layer.GetSpatialRef()
    # NOTE(review): only the first feature of the AOI layer is used as the
    # working geometry -- assumes a single-feature AOI; confirm with callers.
    aoi_feature = aoi_layer.GetFeature(0)
    aoi_geom = aoi_feature.GetGeometryRef()

    # Scratch directory for every intermediate VRT/TIF/SHP; attached to the
    # output layer at the end as a residual source (cleanup happens there).
    tmp_directory = tempfile.mkdtemp(dir='tmp')
    # Get available drivers
    drivers = Drivers()
    task.set_progress(10)

    # GENERATING VRT'S #########################################################
    # Check if is file or directory. If it is a directory an vrt is built
    task.status_description = "Building vrt's"
    vrt_string = 'gdalbuildvrt {result} {files}'
    # ORTO VRT #################################################################
    if orto.protocol.is_directory(orto['source']):
        if orto['layer_name']:
            orto_path = orto.gdal_layer()
        else:
            # Mosaic only the files that actually overlap the AOI.
            vrt_files = []
            for file in orto.protocol.list_files(orto['source'],
                                                 orto.driver.format):
                orto_parameters = orto.parameters.copy()
                orto_parameters['source'] = orto.protocol.join(
                    orto_parameters['source'], file)
                new_layer = Layer(user, orto_parameters)
                gdal_url = new_layer.gdal_layer()
                if is_overlapping(gdal_url, aoi_geom):
                    vrt_files.append(gdal_url)
                del new_layer
            orto_path = tempfile.NamedTemporaryFile(suffix='.vrt',
                                                    prefix='orto_',
                                                    dir=tmp_directory).name
            command = vrt_string.format(result=orto_path,
                                        files=' '.join(vrt_files))
            # NOTE(review): os.system with interpolated paths throughout this
            # function -- safe only if sources are trusted; consider subprocess.
            os.system(command)
    else:
        orto_path = orto.gdal_layer()

    orto_path_warp = tempfile.NamedTemporaryFile(suffix='.vrt',
                                                 prefix='orto_warp_',
                                                 dir=tmp_directory).name
    # Changing srs of raster
    command = CHANGE_TO_4326.format(srcfile=orto_path, dstfile=orto_path_warp)
    os.system(command)
    orto_path = orto_path_warp

    # Getting pixel size
    ds_orto = gdal.Open(orto_path)
    # GSD = pixel width taken from the geotransform (index 1).
    gsd = ds_orto.GetGeoTransform()[1]

    # PointCloud ###############################################################
    # Same mosaic-then-reproject flow as the orthophoto, for the lidar raster.
    if lidar.protocol.is_directory(lidar['source']):
        vrt_files = []
        for file in lidar.protocol.list_files(lidar['source'],
                                              lidar.driver.format):
            lidar_parameter = lidar.parameters.copy()
            lidar_parameter['source'] = lidar.protocol.join(
                lidar['source'], file)
            new_layer = Layer(user, lidar_parameter)
            gdal_url = new_layer.gdal_layer()
            if is_overlapping(gdal_url, aoi_geom):
                vrt_files.append(gdal_url)
            del new_layer
        lidar_path = tempfile.NamedTemporaryFile(suffix='.vrt',
                                                 prefix='lidar_',
                                                 dir=tmp_directory).name
        os.system(vrt_string.format(result=lidar_path,
                                    files=' '.join(vrt_files)))
    else:
        lidar_path = lidar.protocol.gdal_url(lidar['source'])

    lidar_path_warp = tempfile.NamedTemporaryFile(suffix='.vrt',
                                                  prefix='orto_warp_',
                                                  dir=tmp_directory).name
    # Changing srs of raster
    command = CHANGE_TO_4326.format(srcfile=lidar_path, dstfile=lidar_path_warp)
    os.system(command)
    lidar_path = lidar_path_warp

    # MSensing #################################################################
    # Same flow for the multisensing (Sentinel) raster.
    # NOTE(review): unlike the orto/lidar branches, this loop does not
    # `del new_layer`, and the else-branch calls ms.gdal_url(...) where lidar
    # uses lidar.protocol.gdal_url(...) -- confirm the asymmetry is intended.
    if ms.protocol.is_directory(ms['source']):
        vrt_files = []
        for file in ms.protocol.list_files(ms['source'], ms.driver.format):
            ms_parameter = ms.parameters.copy()
            ms_parameter['source'] = ms.protocol.join(ms['source'], file)
            new_layer = Layer(user, ms_parameter)
            gdal_url = new_layer.gdal_layer()
            if is_overlapping(gdal_url, aoi_geom):
                vrt_files.append(gdal_url)
        ms_path = tempfile.NamedTemporaryFile(suffix='.vrt', prefix='ms_',
                                              dir=tmp_directory).name
        os.system(vrt_string.format(result=ms_path, files=' '.join(vrt_files)))
    else:
        ms_path = ms.gdal_url(ms['source'])

    ms_path_warp = tempfile.NamedTemporaryFile(suffix='.vrt',
                                               prefix='orto_warp_',
                                               dir=tmp_directory).name
    # Changing srs of raster
    command = CHANGE_TO_4326.format(srcfile=ms_path, dstfile=ms_path_warp)
    os.system(command)
    ms_path = ms_path_warp
    # ##########################################################################
    task.set_progress(10)
    postgis_obj = PostGis('Public')
    # ##########################################################################
    # DETECTION ################################################################
    task.status_description = "Tiling area of interest"
    zoom_levels = ai_model['zoom_levels']
    # Band numbers (1-based) of the model entities the caller asked for.
    selected_entities = [band_ for band_
                         in range(1, len(ai_model.entities_list)+1)
                         if str(band_) in list(indexes_dict.keys())]
    task.set_progress(15)
    total_zoom = len(zoom_levels)
    # OTB segmentation only runs for this zoom-level index (the first one).
    otb_windows_size = 0
    if total_zoom == 0:
        raise NoTilesError('There are no tiles to process')
    # Creating master table of detections
    master_detections_table = postgis_obj.get_unique_name(
        user, prefix='master_detection_')
    create_master_entities_table = "create table \"{schema}\".\"{master_table}\" (ogc_fid serial " \
                                   "NOT NULL," \
                                   "\"wkb_geometry\" geometry, " \
                                   "entity numeric DEFAULT 0," \
                                   "score numeric DEFAULT 0.0);"
    postgis_obj.send_sql_command(create_master_entities_table.format(
        schema=user, master_table=master_detections_table
    ))
    for windows_size_index, window_size in enumerate(zoom_levels):
        print(termcolor.colored('Processing {} zoom level: {} m'.format(
            windows_size_index, window_size), 'green'))
        # Square tiling of the AOI at this window size (5-unit offset overlap).
        grid_shape = tempfile.NamedTemporaryFile(suffix=".shp",
                                                 dir=tmp_directory).name
        tiles = square_grid_polygon(window_size, aoi_geom, gen_grid=grid_shape,
                                    offset=5)
        cut_shape_temp = tempfile.NamedTemporaryFile(suffix=".kml",
                                                     prefix='tile_',
                                                     dir=tmp_directory).name
        kml_driver = ogr.GetDriverByName('KML')
        total_tiles = len(tiles)
        if total_tiles == 0:
            raise NoTilesError('There are no tiles to process')
        optimized_shapes = []
        for tile_index, tile in enumerate(tiles):
            task.status_description = "Detecting on tiles: {}/{} " \
                                      "({}/{})".format(tile_index+1,
                                                       total_tiles,
                                                       windows_size_index+1,
                                                       len(zoom_levels))
            # Progress spans 15..85 across all zoom levels and tiles.
            task.set_progress(15 + (
                (windows_size_index+tile_index / total_tiles)/total_zoom) * 70)
            print('Progress {}/{}'.format(tile_index + 1, total_tiles))
            # Write the current tile polygon as a one-feature KML cutline
            # (cut_shape_temp is reused/overwritten on every iteration).
            shp_datasource = kml_driver.CreateDataSource(cut_shape_temp)
            layer = shp_datasource.CreateLayer(
                cut_shape_temp, geom_type=ogr.wkbPolygon,
                srs=tile.GetSpatialReference())
            feature = ogr.Feature(layer.GetLayerDefn())
            feature.SetGeometry(tile)
            layer.CreateFeature(feature)
            feature.Destroy()
            del shp_datasource
            orto_png_path = tempfile.NamedTemporaryFile(suffix=".tif",
                                                        prefix='tile_orto_',
                                                        dir=tmp_directory).name
            lidar_crop = tempfile.NamedTemporaryFile(suffix=".tif",
                                                     prefix='tile_',
                                                     dir=tmp_directory).name
            ms_crop = tempfile.NamedTemporaryFile(suffix=".tif",
                                                  prefix='tile_',
                                                  dir=tmp_directory).name
            # Crop the three rasters to the tile. Only the orthophoto crop is
            # fed to ai_model.detect below; lidar/ms crops are produced and
            # later deleted -- presumably for models that consume them; verify.
            os.system(GDAL_WARP_SENTENCE.format(cut_shape=cut_shape_temp,
                                                source=orto_path,
                                                dst=orto_png_path))
            os.system(GDAL_WARP_SENTENCE.format(cut_shape=cut_shape_temp,
                                                source=lidar_path,
                                                dst=lidar_crop))
            os.system(GDAL_WARP_SENTENCE.format(cut_shape=cut_shape_temp,
                                                source=ms_path,
                                                dst=ms_crop))
            # Optional OTB mean-shift segmentation, only on the first zoom
            # level and only when the model enables it.
            if is_optimized and windows_size_index == otb_windows_size and \
                    ai_model['otb_enabled']:
                ranger = 15
                spatialr = 10
                shp_geometry = tempfile.NamedTemporaryFile(
                    prefix='OTB_tile_{}_'.format(tile_index), suffix='.shp',
                    dir=tmp_directory).name
                # (https://www.orfeo-toolbox.org/CookBook/recipes/improc.html#large-scale-mean- \
                # shift-lsms-segmentation)
                app = otbApplication.Registry.CreateApplication(
                    "LargeScaleMeanShift")
                app.SetParameterString("in", orto_png_path)
                app.SetParameterInt("spatialr", spatialr)
                app.SetParameterFloat("ranger", ranger)
                app.SetParameterInt("tilesizex", 800)
                app.SetParameterInt("tilesizey", 800)
                app.SetParameterInt("minsize", 10)
                app.SetParameterString("mode.vector.out", shp_geometry)
                app.ExecuteAndWriteOutput()
                # Load the segmentation shapefile into the user's schema,
                # reprojected to the AOI's EPSG code.
                command = 'ogr2ogr -q -f "{}" -lco OVERWRITE=yes -lco LAUNDER=no ' \
                          'PG:"host={} port={} dbname={} user={} password={} schemas={}" ' \
                          '-t_srs EPSG:{} {}'.format('PostgreSQL',
                                                     postgis_obj.ip,
                                                     postgis_obj.port,
                                                     postgis_obj.dbname,
                                                     postgis_obj.user,
                                                     postgis_obj.passw, user,
                                                     aoi_crs.GetAuthorityCode(
                                                         None),
                                                     shp_geometry)
                os.system(command)
                # ogr2ogr names the table after the shapefile's basename.
                otb_table = shp_geometry.split(os.sep)[-1].split('.')[0]
                sql_command = INDEXING_TABLE.format(
                    index_name="{}_{}".format(user, otb_table),
                    schema=user, table=otb_table,
                    geometry_field='wkb_geometry')
                postgis_obj.send_sql_command(sql_command)
                optimized_shapes.append(otb_table)
                shape_driver = Drivers().get_by_name(const.SHAPE_KEY)
                shape_driver.remove(shp_geometry)
                del app, shp_geometry
            ####################################################################
            print('Identifying')
            result_dict = ai_model.detect(orto_png_path, windows_size_index)
            # Creating tif result
            orto_ds = gdal.Open(orto_png_path)
            xsize = orto_ds.GetRasterBand(1).XSize
            ysize = orto_ds.GetRasterBand(1).YSize
            ground_truth = orto_ds.GetGeoTransform()
            filtered_results = ai_model.filter(result_dict,
                                               indexes_dict.keys(),
                                               float(confident))
            to_array = ai_model.to_array(filtered_results, [xsize, ysize])
            # One raster + polygonize pass per requested entity.
            for entity in indexes_dict.keys():
                entity = int(entity)
                result = tempfile.NamedTemporaryFile(suffix='.tif',
                                                     prefix='result_',
                                                     dir=tmp_directory).name
                raster_driver = gdal.GetDriverByName(
                    drivers.get_by_name('GeoTIFF').gdal_driver)
                result_dataset = raster_driver.Create(result, xsize, ysize, 1,
                                                      gdal.GDT_UInt16)
                result_dataset.SetGeoTransform(ground_truth)
                result_dataset.SetProjection(orto_ds.GetProjection())
                result_array = ai_model.to_raster(to_array, entity)
                result_dataset.GetRasterBand(1).WriteArray(result_array)
                result_dataset.GetRasterBand(1).SetNoDataValue(0)
                # del flushes the dataset to disk before polygonizing.
                del result_dataset
                postgres_conn = POSTGRES_CONNECTION_STRING.format(
                    postgis_dbname=postgis_obj.dbname,
                    postgis_host=postgis_obj.ip,
                    postgis_port=postgis_obj.port,
                    postgis_user=postgis_obj.user,
                    postgis_password=postgis_obj.passw,
                    postgis_schemas=user
                )
                postgis_table = postgis_obj.get_unique_name(
                    user, suffix='_{}'.format(entity))
                command = 'gdal_polygonize.py {} -b {} -q -f "PostgreSQL" PG:"{}" {}'\
                    .format(result, 1, postgres_conn, postgis_table)
                os.system(command)
                os.remove(result)
                # Decode the polygonized pixel value `dn` into entity + score:
                # judging by this SQL, dn encodes entity*100 + score*10 --
                # confirm against ai_model.to_raster.
                command = 'INSERT INTO \"{schema}\".\"{dst_table}\" (wkb_geometry, entity, ' \
                          'score) SELECT wkb_geometry, ceil(dn/100)::Integer, (dn - ceil(dn/100)*100)/10 ' \
                          'FROM \"{schema}\".\"{src_table}\"'
                postgis_obj.send_sql_command(command.format(
                    schema=user,
                    dst_table=master_detections_table,
                    src_table=postgis_table,
                    geometry_field='wkb_geometry',
                    entity_index=entity))
                postgis_obj.delete_tables(user, postgis_table)
            del to_array, orto_ds
            # Removing tiled images
            os.remove(orto_png_path)
            if os.path.exists(lidar_crop):
                os.remove(lidar_crop)
            if os.path.exists(ms_crop):
                os.remove(ms_crop)
        # Join optimized shapes
        # # First, create master table.
        if is_optimized and windows_size_index == otb_windows_size and \
                ai_model['otb_enabled']:
            create_master_table = "create table \"{schema}\".\"{master_table}\" " \
                                  "(ogc_fid serial " \
                                  "NOT NULL," \
                                  "label INTEGER," \
                                  "\"nbPixels\" double precision," \
                                  "\"meanB0\" double precision," \
                                  "\"meanB1\" double precision," \
                                  "\"meanB2\" double precision," \
                                  "\"varB0\" double precision," \
                                  "\"varB1\" double precision," \
                                  "\"varB2\" double precision," \
                                  "\"wkb_geometry\" geometry);"
            master_table = postgis_obj.get_unique_name(user, 'master_otb_')
            postgis_obj.send_sql_command(create_master_table.format(
                schema=user, master_table=master_table))
            for otb_table in optimized_shapes:
                postgis_obj.send_sql_command(MERGE_TABLE.format(
                    schema=user, master_table=master_table,
                    table_in=otb_table))
                postgis_obj.delete_tables(user, otb_table,)
            # Apply 0 buffer to avoid possible self intersection
            master_otb_fixed = postgis_obj.get_unique_name(user,
                                                           'master_otb_fixed')
            postgis_obj.send_sql_command(FIXED.format(schema=user,
                                                      output=master_otb_fixed,
                                                      input=master_table))
            # Indexing master otb table
            postgis_obj.send_sql_command(INDEXING_TABLE.format(
                index_name="{}_{}".format(user, master_otb_fixed),
                schema=user, table=master_otb_fixed,
                geometry_field='wkb_geometry'))
            postgis_obj.delete_tables(user, master_table, )
            # Reuse otb_table to point at the fixed master; attached to the
            # output layer after the zoom loop.
            otb_table = master_otb_fixed
        # Removing last tiles and grids
        os.remove(cut_shape_temp)
        shape_driver = Drivers().get_by_name(const.SHAPE_KEY)
        shape_driver.remove(grid_shape)
    # ##########################################################################
    # ##########################################################################
    # Unloading ai_model
    ai_model.unload(task.task_id)
    # (Re)install the PL/pgSQL cleanup function for downstream consumers.
    postgis_obj.send_sql_command(SET_CLEAN_FUNCTION)
    tables = []
    # Split the master detections table into one raw table per entity.
    for entity in selected_entities:
        # NOTE(review): the template has no {user} placeholder although user=
        # is passed (str.format ignores extra kwargs); the docstring promises
        # {source_name}__{user}__{timestamp} naming -- confirm intent.
        table_raw = "{entity_name}__raw_{timestamp}" \
            .format(entity_name=indexes_dict[str(entity)]["name"],
                    user=user, timestamp=timestamp)
        postgis_obj.send_sql_command(SELECT_BY.format(
            schema=user, dst_table=table_raw,
            src_table=master_detections_table,
            field='entity', field_value=entity))
        tables.append(table_raw)
    postgis_obj.delete_tables(user, master_detections_table, )
    output_layer.attach_sources(tables)
    if is_optimized and ai_model['otb_enabled']:
        output_layer.attach_sources(otb_table, )
    # ##########################################################################
    task.status_description = "Uploading result"
    # Hand the scratch directory to the output layer for deferred cleanup.
    output_layer.residual_sources.append(tmp_directory)
    return output_layer


# Route registration consumed by the API dispatcher: non-blocking task endpoint.
POST_REQUEST = {
    'function': post,
    'parser': const.PARSER_NONBLOCKING
}