from pdal import Pipeline
import ast
import glob
import json
import os
import sys
import traceback

import geopandas as gpd

# The script expects a single JSON argument with the keys:
#   path_entwine   - EPT dataset (ept.json), relative to ./in/
#   buffer         - GeoJSON file with the clip polygons, relative to ./in/
#   identificador  - name of the per-job output subdirectory
#   clasifs        - list of LAS classification codes, e.g. "[2, 6]"
# Sample kept from the original code (it predates 'identificador' and
# 'clasifs', and used a numeric buffer instead of a GeoJSON path):
#   {"path_entwine": "../DATA4/entwine_updated_Andalucia_Bloque1M/ept.json",
#    "disk": "", "path_geojson": "./puntos.geojson", "buffer": "2"}
args = json.loads(sys.argv[1])

buffer = gpd.read_file("./in/" + args['buffer'])
entwine = "./in/" + args['path_entwine']

# Path pieces reused throughout: the directory holding the buffer file,
# its base name without extension, and the per-job output directory.
buffer_dir, buffer_file = args['buffer'].rsplit('/', 1)
base_name = buffer_file.rsplit('.', 1)[0]
out_dir = "./in/" + buffer_dir + '/' + args['identificador']


def pdal(i, geom, clasifs):
    """Clip the EPT cloud with one polygon, keeping only the given classes."""
    # filters.range expects e.g. "Classification[2:2],Classification[6:6]".
    limits = ",".join("Classification[{0}:{0}]".format(c) for c in clasifs)
    pipeline_spec = [
        {
            "type": "readers.ept",
            "filename": entwine,
            "polygon": str(geom)  # WKT of the clip polygon
        },
        {
            "type": "filters.range",
            "limits": limits
        },
        {
            "type": "writers.las",
            "filename": out_dir + '/' + base_name + str(i) + '.laz'
        }
    ]
    pipeline = Pipeline(json.dumps(pipeline_spec))
    count = pipeline.execute()
    print('points written: ' + str(count))
    print(pipeline.log)


def joinlaz():
    """Merge the per-polygon .laz files into a single output file."""
    origen = out_dir + '/*.laz'
    destino = "./in/" + buffer_dir + '/' + base_name + '.laz'
    # Expand the wildcard ourselves rather than relying on PDAL: a pipeline
    # given only filenames infers the readers from the extensions, merges
    # the point views and writes the last entry.
    pipeline_spec = sorted(glob.glob(origen)) + [destino]
    pipeline = Pipeline(json.dumps(pipeline_spec))
    count = pipeline.execute()
    print('points merged: ' + str(count))
    print('log ' + str(pipeline.log))


if __name__ == '__main__':
    try:
        os.makedirs(out_dir, exist_ok=True)
        clasifs = ast.literal_eval(args['clasifs'])  # e.g. "[2, 6]" -> [2, 6]
        for i, geom in enumerate(buffer['geometry']):
            pdal(i, geom, clasifs)
        joinlaz()
    except Exception:
        traceback.print_exc()
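
# ---------------------------------------------------------------------------
# Usage sketch (a minimal example; the script name "clip_ept.py" and all
# paths below are illustrative, not taken from the original):
#
#   python clip_ept.py '{"path_entwine": "entwine/ept.json",
#                        "buffer": "zona/buffer.geojson",
#                        "identificador": "job42",
#                        "clasifs": "[2, 6]"}'
#
# For each polygon in ./in/zona/buffer.geojson this writes
# ./in/zona/job42/buffer0.laz, ./in/zona/job42/buffer1.laz, ... and
# joinlaz() then merges them into ./in/zona/buffer.laz.
# ---------------------------------------------------------------------------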