"""Extract a LAZ point cloud from an Entwine (EPT) index, clipped to a polygon.

Usage:
    python <script> '<json-args>'

where the single argv is a JSON object with keys:
    path_entwine : path, relative to ./in/, of the Entwine ept.json index
    buffer       : path, relative to ./in/, of a GeoJSON file whose first
                   geometry is used as the clipping polygon
    disk, path_geojson : accepted but unused here

The clipped cloud is written next to the buffer file under ./in/, with the
extension replaced by ".laz".
"""
from pdal import Pipeline
import json
import os
import sys
import traceback

import geopandas as gpd
import numpy as np

# Module-level configuration, parsed once from the single JSON CLI argument,
# e.g. {"path_entwine": ".../ept.json", "disk": "", "buffer": "job/area.geojson"}
args = json.loads(sys.argv[1])
buffer = gpd.read_file("./in/" + args['buffer'])  # clip polygon (GeoDataFrame)
entwine = "./in/" + args['path_entwine']          # Entwine ept.json index path


def _output_laz_path(buffer_rel_path):
    """Return the output .laz path derived from the buffer GeoJSON path.

    "job/area.geojson" -> "./in/job/area.laz". Unlike the previous chained
    rsplit() approach, this also works when the path has no directory
    component ("area.geojson" -> "./in/area.laz") instead of raising
    IndexError.
    """
    stem, _ext = os.path.splitext(buffer_rel_path)
    return "./in/" + stem + ".laz"


def pdal():
    """Build and run a PDAL pipeline: read EPT clipped to the buffer polygon,
    write the result as a compressed LAS (.laz) file, and print the PDAL log.
    """
    stages = [
        {
            "type": "readers.ept",
            "filename": entwine,
            # str() of a shapely geometry yields WKT, which readers.ept
            # accepts as its "polygon" clipping option.
            "polygon": str(buffer['geometry'][0]),
        },
        {
            "type": "writers.las",
            "filename": _output_laz_path(args['buffer']),
        },
    ]
    pipeline = Pipeline(json.dumps(stages))
    pipeline.execute()
    print(pipeline.log)


if __name__ == '__main__':
    try:
        pdal()
    except Exception:
        # Best-effort CLI: report the full traceback instead of crashing
        # with a bare interpreter error.
        traceback.print_exc()