diff --git a/configs/configuraciones_generales.yaml b/configs/configuraciones_generales.yaml
index 2e3edca..b88c980 100755
--- a/configs/configuraciones_generales.yaml
+++ b/configs/configuraciones_generales.yaml
@@ -62,7 +62,7 @@ nombres_variables_gps:
 # Información para procesamiento de líneas
 nombre_archivo_informacion_lineas: "lineas_amba_test.csv" # Archivo .csv con lineas, debe contener ("id_linea", "nombre_linea", "modo")
 lineas_contienen_ramales: True # Especificar si las líneas de colectivo contienen ramales
-nombre_archivo_paradas:
+nombre_archivo_paradas: "stops.csv"
 imprimir_lineas_principales: 5 # Imprimir las lineas principales - "All" imprime todas las líneas
 
 # Servicios GPS
@@ -105,3 +105,4 @@ zonificaciones:
     orden5:
 
 poligonos: # Especificar una capa geográfica de polígonos en formato .geojson. El archivo requiere las siguientes columnas: ['id', 'tipo', 'geometry']. 'id' es el id o nombre del polígono, tipo puede ser 'poligono' o 'cuenca'.
+tiempos_viaje_estaciones:
\ No newline at end of file
diff --git a/configs/configuraciones_generales_2019_m1.yaml b/configs/configuraciones_generales_2019_m1.yaml
index 5abc967..3188544 100644
--- a/configs/configuraciones_generales_2019_m1.yaml
+++ b/configs/configuraciones_generales_2019_m1.yaml
@@ -182,3 +182,5 @@ zonificaciones:
     var5:
     orden5:
 
+
+tiempos_viaje_estaciones:
\ No newline at end of file
diff --git a/urbantrips/carto/stops.py b/urbantrips/carto/stops.py
index b90a86a..6815d1f 100644
--- a/urbantrips/carto/stops.py
+++ b/urbantrips/carto/stops.py
@@ -5,8 +5,7 @@
 import libpysal
 from urbantrips.carto import carto
 from urbantrips.geo import geo
-from urbantrips.utils.utils import (
-    duracion, iniciar_conexion_db, leer_configs_generales)
+from urbantrips.utils.utils import duracion, iniciar_conexion_db, leer_configs_generales
 
 
 @duracion
@@ -17,23 +16,24 @@ def create_stops_table():
     """
     configs = leer_configs_generales()
 
-    if 'nombre_archivo_paradas' in configs:
-        if configs['nombre_archivo_paradas'] is not None:
-            stops_file_name = configs['nombre_archivo_paradas']
+    if "nombre_archivo_paradas" in configs:
+        if configs["nombre_archivo_paradas"] is not None:
+            stops_file_name = configs["nombre_archivo_paradas"]
             stops_path = os.path.join("data", "data_ciudad", stops_file_name)
             print("Leyendo stops", stops_file_name)
 
-        if os.path.isfile(stops_path):
-            stops = pd.read_csv(stops_path)
-            upload_stops_table(stops)
-        else:
-            print("No existe un archivo de stops. Puede utilizar "
-                  "notebooks/stops_creation_with_node_id_helper.ipynb"
-                  "para crearlo a partir de los recorridos"
-                  )
+            if os.path.isfile(stops_path):
+                stops = pd.read_csv(stops_path)
+                upload_stops_table(stops)
+            else:
+                print(
+                    "No existe un archivo de stops. Puede utilizar "
+                    "notebooks/stops_creation_with_node_id_helper.ipynb"
+                    "para crearlo a partir de los recorridos"
+                )
 
     # upload trave times between stations
-    if configs['tiempos_viaje_estaciones'] is not None:
+    if configs["tiempos_viaje_estaciones"] is not None:
         upload_travel_times_stations()
@@ -41,9 +41,17 @@ def upload_stops_table(stops):
     """
     Reads a stops table, checks it and uploads it to db
     """
-    conn = iniciar_conexion_db(tipo='insumos')
-    cols = ['id_linea', 'id_ramal', 'node_id', 'branch_stop_order',
-            'stop_x', 'stop_y', 'node_x', 'node_y']
+    conn = iniciar_conexion_db(tipo="insumos")
+    cols = [
+        "id_linea",
+        "id_ramal",
+        "node_id",
+        "branch_stop_order",
+        "stop_x",
+        "stop_y",
+        "node_x",
+        "node_y",
+    ]
     stops = stops.reindex(columns=cols)
 
     assert not stops.isna().any().all(), "Hay datos faltantes en stops"
@@ -81,8 +89,7 @@ def create_temporary_stops_csv_with_node_id(geojson_path):
     stops_df = aggregate_line_stops_to_node_id(stops_gdf)
 
     data_path = os.path.join("data", "data_ciudad")
-    stops_df.to_csv(os.path.join(data_path,
-                    "temporary_stops.csv"), index=False)
+    stops_df.to_csv(os.path.join(data_path, "temporary_stops.csv"), index=False)
 
 
 def create_line_stops_equal_interval(geojson_path):
@@ -110,8 +117,8 @@ def create_line_stops_equal_interval(geojson_path):
     geo.check_all_geoms_linestring(geojson_data)
 
     # if there is no branch_id create
-    if 'id_ramal' not in geojson_data.columns:
-        geojson_data['id_ramal'] = None
+    if "id_ramal" not in geojson_data.columns:
+        geojson_data["id_ramal"] = None
 
     # Project in meters
     epsg_m = geo.get_epsg_m()
@@ -120,8 +127,16 @@ def create_line_stops_equal_interval(geojson_path):
     stops_gdf = interpolate_stops_every_x_meters(geojson_data)
 
     stops_gdf = stops_gdf.reindex(
-        columns=['id_linea', 'id_ramal', 'branch_stop_order',
-                 'line_stops_buffer', 'x', 'y', 'geometry'])
+        columns=[
+            "id_linea",
+            "id_ramal",
+            "branch_stop_order",
+            "line_stops_buffer",
+            "x",
+            "y",
+            "geometry",
+        ]
+    )
     stops_gdf = stops_gdf.to_crs(epsg=4326)
 
     return stops_gdf
@@ -145,14 +160,13 @@ def interpolate_stops_every_x_meters(gdf):
         line_stops_buffer = row.line_stops_buffer
 
         line_stops_data = create_stops_from_route_geom(
-            route_geom=route_geom,
-            stops_distance=stops_distance
+            route_geom=route_geom, stops_distance=stops_distance
         )
 
         # Add line_id to the stops data
-        line_stops_data['id_linea'] = row.id_linea
-        line_stops_data['id_ramal'] = row.id_ramal
-        line_stops_data['line_stops_buffer'] = line_stops_buffer
+        line_stops_data["id_linea"] = row.id_linea
+        line_stops_data["id_ramal"] = row.id_ramal
+        line_stops_data["line_stops_buffer"] = line_stops_buffer
 
         # Add the stops data to the overall stops data list
         stops_data.append(line_stops_data)
@@ -181,10 +195,11 @@ def aggregate_line_stops_to_node_id(stops_gdf):
     """
 
     # Add node_id for each line
-    stops_df = stops_gdf\
-        .groupby('id_linea', as_index=False)\
-        .apply(create_node_id)\
-        .reset_index(drop=True)
+    stops_df = (
+        stops_gdf.groupby("id_linea", as_index=False)
+        .apply(create_node_id)
+        .reset_index(drop=True)
+    )
 
     return stops_df
 
@@ -212,15 +227,12 @@ def create_stops_from_route_geom(route_geom, stops_distance):
     ranges = list(range(0, int(route_geom.length), stops_distance))
     stop_points = line_interpolate_point(route_geom, ranges).tolist()
 
-    stops_df = pd.DataFrame(range(len(stop_points)),
-                            columns=['branch_stop_order'])
-    stops_df = gpd.GeoDataFrame(
-        stops_df, geometry=stop_points,
-        crs=f"EPSG:{epsg_m}")
+    stops_df = pd.DataFrame(range(len(stop_points)), columns=["branch_stop_order"])
+    stops_df = gpd.GeoDataFrame(stops_df, geometry=stop_points, crs=f"EPSG:{epsg_m}")
 
     geom_wgs84 = stops_df.geometry.to_crs(epsg=4326)
-    stops_df['x'] = geom_wgs84.x
-    stops_df['y'] = geom_wgs84.y
+    stops_df["x"] = geom_wgs84.x
+    stops_df["y"] = geom_wgs84.y
 
     return stops_df
 
@@ -247,29 +259,32 @@ def create_node_id(line_stops_gdf):
     gdf = line_stops_gdf.copy()
 
     connectivity = libpysal.weights.fuzzy_contiguity(
-        gdf=gdf,
-        buffering=True,
-        drop=False,
-        buffer=buffer,
-        predicate='intersects')
+        gdf=gdf, buffering=True, drop=False, buffer=buffer, predicate="intersects"
+    )
 
-    gdf.loc[:, 'node_id'] = connectivity.component_labels
+    gdf.loc[:, "node_id"] = connectivity.component_labels
     gdf = gdf.to_crs(epsg=4326)
 
     # geocode new position based on new node_id
-    gdf.loc[:, ['stop_x']] = gdf.geometry.x
-    gdf.loc[:, ['stop_y']] = gdf.geometry.y
-
-    x_new_long = gdf.groupby('node_id').apply(
-        lambda df: df.stop_x.mean()).to_dict()
-    y_new_long = gdf.groupby('node_id').apply(
-        lambda df: df.stop_y.mean()).to_dict()
-
-    gdf.loc[:, 'node_y'] = gdf['node_id'].replace(y_new_long)
-    gdf.loc[:, 'node_x'] = gdf['node_id'].replace(x_new_long)
-
-    cols = ['id_linea', 'id_ramal', 'node_id',
-            'branch_stop_order', 'stop_x', 'stop_y', 'node_x', 'node_y']
+    gdf.loc[:, ["stop_x"]] = gdf.geometry.x
+    gdf.loc[:, ["stop_y"]] = gdf.geometry.y
+
+    x_new_long = gdf.groupby("node_id").apply(lambda df: df.stop_x.mean()).to_dict()
+    y_new_long = gdf.groupby("node_id").apply(lambda df: df.stop_y.mean()).to_dict()
+
+    gdf.loc[:, "node_y"] = gdf["node_id"].replace(y_new_long)
+    gdf.loc[:, "node_x"] = gdf["node_id"].replace(x_new_long)
+
+    cols = [
+        "id_linea",
+        "id_ramal",
+        "node_id",
+        "branch_stop_order",
+        "stop_x",
+        "stop_y",
+        "node_x",
+        "node_y",
+    ]
     gdf = gdf.reindex(columns=cols)
 
     return gdf
@@ -277,30 +292,42 @@ def upload_travel_times_stations():
     """
-    This function loads a table holding travel time in minutes
-    between stations for modes that don't have GPS in the vehicles
+    This function loads a table holding travel time in minutes
+    between stations for modes that don't have GPS in the vehicles
     """
     configs = leer_configs_generales()
-    tts_file_name = configs['tiempos_viaje_estaciones']
+    tts_file_name = configs["tiempos_viaje_estaciones"]
 
     path = os.path.join("data", "data_ciudad", tts_file_name)
     print("Leyendo tabla de tiempos de viaje entre estaciones", tts_file_name)
 
     if os.path.isfile(path):
         travel_times_stations = pd.read_csv(path)
-        cols = ['id_o', 'id_linea_o', 'id_ramal_o', 'lat_o', 'lon_o',
-                'id_d', 'lat_d', 'lon_d', 'id_linea_d', 'id_ramal_d',
-                'travel_time_min']
+        cols = [
+            "id_o",
+            "id_linea_o",
+            "id_ramal_o",
+            "lat_o",
+            "lon_o",
+            "id_d",
+            "lat_d",
+            "lon_d",
+            "id_linea_d",
+            "id_ramal_d",
+            "travel_time_min",
+        ]
         travel_times_stations = travel_times_stations.reindex(columns=cols)
 
-        assert not travel_times_stations.isna().any(
-        ).all(), "Hay datos faltantes en la tabla"
+        assert (
+            not travel_times_stations.isna().any().all()
+        ), "Hay datos faltantes en la tabla"
 
         print("Subiendo tabla de tiempos de viaje entre estaciones a la DB")
-        conn = iniciar_conexion_db(tipo='insumos')
+        conn = iniciar_conexion_db(tipo="insumos")
 
         travel_times_stations.to_sql(
-            "travel_times_stations", conn, if_exists="replace", index=False)
+            "travel_times_stations", conn, if_exists="replace", index=False
+        )
         conn.close()
     else:
diff --git a/urbantrips/tests/test_routes.py b/urbantrips/tests/test_routes.py
index a720b26..a58d00a 100644
--- a/urbantrips/tests/test_routes.py
+++ b/urbantrips/tests/test_routes.py
@@ -9,12 +9,10 @@ def test_routes():
     utils.create_db()
     routes.process_routes_geoms()
 
-    conn_insumos = utils.iniciar_conexion_db(tipo='insumos')
+    conn_insumos = utils.iniciar_conexion_db(tipo="insumos")
 
-    lines_routes = pd.read_sql(
-        "select * from official_lines_geoms", conn_insumos)
-    branches_routes = pd.read_sql(
-        "select * from official_branches_geoms", conn_insumos)
+    lines_routes = pd.read_sql("select * from official_lines_geoms", conn_insumos)
+    branches_routes = pd.read_sql("select * from official_branches_geoms", conn_insumos)
 
     assert len(lines_routes) == 2
     assert len(branches_routes) == 8
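
For context on the new `tiempos_viaje_estaciones` entry: `create_stops_table()` now calls `upload_travel_times_stations()`, which reads a CSV from `data/data_ciudad` with one row per station pair and a travel time in minutes, and loads it into the `insumos` database. The sketch below builds a minimal version of that input; the file name, ids and coordinates are illustrative assumptions, only the column names and config keys come from this diff.

```python
import os

import pandas as pd

# Hypothetical example row; real ids and coordinates depend on the city being processed.
# The matching YAML entries would be, e.g.:
#   nombre_archivo_paradas: "stops.csv"
#   tiempos_viaje_estaciones: "travel_times_stations.csv"
travel_times = pd.DataFrame(
    {
        "id_o": [1],
        "id_linea_o": [10],
        "id_ramal_o": [101],
        "lat_o": [-34.6037],
        "lon_o": [-58.3816],
        "id_d": [2],
        "lat_d": [-34.6158],
        "lon_d": [-58.4333],
        "id_linea_d": [10],
        "id_ramal_d": [101],
        "travel_time_min": [12.5],
    }
)

# Write the file where upload_travel_times_stations() looks for it; the columns
# above are exactly the ones that function reindexes to before loading the table.
os.makedirs(os.path.join("data", "data_ciudad"), exist_ok=True)
travel_times.to_csv(
    os.path.join("data", "data_ciudad", "travel_times_stations.csv"), index=False
)
```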