Last active
August 30, 2019 16:00
-
-
Save phileas-condemine/4059acb7c5ba8c757f2ef5bdeb030cb9 to your computer and use it in GitHub Desktop.
This script computes driving routes from each of the ~2M population squares (200m x 200m) to its 10 nearest maternity wards (pre-selected by straight-line distance with FNN), processing the requests in batches of 50k.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
library(data.table)
# Data preparation: decode the INSEE 200m population grid ("carreaux").
# Each square's idINSPIRE key embeds its ETRS89-LAEA (EPSG:3035) grid
# coordinates; parse them, reproject to WGS84 lon/lat, and cache the
# result as an .RData file so this expensive step runs only once.
prep_data <- FALSE
if (prep_data) {
  # Source: https://www.insee.fr/fr/statistiques/2520034
  carreaux <- foreign::read.dbf("external_data/carroyage_200m.dbf")
  carreaux <- data.table(carreaux)
  head(carreaux)
  carreaux[, idINSPIRE := as.character(idINSPIRE)]
  # idINSPIRE looks like "CRS3035RES200mN<northing>E<easting>":
  # strip the constant prefix, then split into Y (northing) and X (easting).
  carreaux[, coord := gsub("CRS3035RES200m", "", idINSPIRE)]
  carreaux$idk <- NULL
  carreaux$nbcar <- NULL
  carreaux[, Y := stringr::str_extract(coord, "^.+E")]
  carreaux[, Y := gsub("[NE]", "", Y)]
  carreaux[, Y := as.numeric(Y)]
  carreaux[, X := stringr::str_extract(coord, "E.+$")]
  carreaux[, X := gsub("E", "", X)]
  carreaux[, X := as.numeric(X)]
  carreaux$coord <- NULL
  carreaux$id <- NULL
  carreaux$idINSPIRE <- NULL
  library(rgdal)
  carreaux <- na.omit(carreaux)
  # Promote to a SpatialPointsDataFrame, declare the source CRS
  # (EPSG:3035, ETRS89-LAEA — NOT WGS84), and reproject to WGS84 lon/lat.
  coordinates(carreaux) <- c("X", "Y")
  proj4string(carreaux) <- CRS("+init=epsg:3035")
  CRS.new <- CRS("+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs")
  carreaux <- spTransform(carreaux, CRS.new)
  carreaux$lon <- data.frame(coordinates(carreaux))$X
  carreaux$lat <- data.frame(coordinates(carreaux))$Y
  # Drop the spatial wrapper; keep the plain attribute table with lon/lat.
  carreaux <- carreaux@data
  head(carreaux)
  sapply(carreaux, class)
  save(carreaux, file = "external_data/carroyage_200m.RData")
}
load("external_data/carroyage_200m.RData")
library(magrittr)
library(ggplot2)
library(plotly)
library(leaflet)
# Visual sanity check: total population covered, then a leaflet map of a
# random 10k sample of squares coloured by log population count (ind_c).
sum(carreaux$ind_c)
samp <- carreaux[sample(nrow(carreaux), 10000), ]
# Earlier static/plotly attempt, kept for reference:
# map <- ggplot(data=samp,aes(x=lon,y=lat,color=log(ind_c)))+geom_point()+
#   theme(axis.text=element_blank(),axis.ticks = element_blank())
# ggplotly(map)
pal <- colorNumeric(
  palette = "Blues",
  domain = log(samp$ind_c))
leaflet(data = samp) %>%
  addTiles() %>%
  addCircleMarkers(lng = ~lon, lat = ~lat, color = ~pal(log(ind_c)))
# Clustered-marker variant, kept for reference:
# library(leaflet)
# leaflet()%>%
#   addTiles()%>%
#   addCircleMarkers(data=samp,lng=~lon,lat=~lat,fill=~log(ind_c),
#                    clusterOptions = markerClusterOptions())
# Load the maternity wards and drop overseas ones (FINESS codes starting
# with 97/98), map them, then pair every grid square with its 10 nearest
# wards by straight-line ("flying") distance using FNN. This is a cheap
# pre-filter before the exact road routing below.
mater <- fread("external_data/maternités/Table maternités 2017 avec coordonnées.csv", dec = ",")
mater <- mater[!substr(FI_ET, 1, 2) %in% c("97", "98")]
factpal <- colorFactor(topo.colors(uniqueN(mater$TYPE)), mater$TYPE)
leaflet(data = mater) %>%
  addTiles() %>%
  addCircleMarkers(lng = ~longitude, lat = ~latitude,
                   color = ~factpal(TYPE), fillOpacity = .7, fill = ~factpal(TYPE),
                   label = ~paste0("nom: ", NOM_MAT, ", type: ", TYPE))
library(FNN)
library(jsonlite)
carreaux <- data.table(carreaux)
mater$id <- seq_len(nrow(mater))
carreaux$id <- seq_len(nrow(carreaux))
# NB: KNN on raw lat/lon degrees — distances are approximate, which is
# acceptable for a nearest-neighbour shortlist at this scale.
NN <- get.knnx(data = mater[, c("latitude", "longitude")],
               query = carreaux[, c("lat", "lon")], k = 10)$nn.index
colnames(NN) <- paste0("voisin", 1:10)
# Wide (square x 10 neighbours) matrix -> long (square, rank, ward) table.
NN <- melt(NN)
colnames(NN) <- c("carreau", "num_voisin", "mater")
NN <- NN[, c("carreau", "mater")]
NN <- data.table(NN)
# Attach the coordinates of both endpoints of each (square, ward) pair.
NN <- merge(NN, mater[, c("id", "latitude", "longitude")], by.x = "mater", by.y = "id")
NN <- merge(NN, carreaux[, c("id", "lat", "lon")], by.x = "carreau", by.y = "id")
setnames(NN, c("latitude", "longitude", "lat", "lon"),
         c("lat_mater", "lon_mater", "lat_carr", "lon_carr"))
save(NN, mater, carreaux, file = "external_data/maternités/NN.RData")
maters_id <- mater$id
carreaux_id <- carreaux$id
# Interactive-debugging leftover: pick one ward at random for manual runs.
mater_id <- sample(maters_id, 1)
# NOTE: problem with wards 57:60, departement 14!
# For each remaining maternity ward, query an OSRM "table" service for the
# driving duration from the ward to each of its candidate grid squares, in
# chunks of ~50k destinations, with up to 3 retries per chunk. Each
# (ward, chunk) result is cached as its own .RData file so the whole loop
# can be stopped and resumed.
taille_chunks <- 50000
########################################
#### CAREFUL SCI NOTATION !!!! #########
########################################
# Coordinates pasted into the URL must never render in scientific notation.
options(scipen = 999)
# Resume support: collect ward ids that already have at least one result
# file on disk. Caveat: a ward with only SOME chunks done is still skipped
# entirely — TODO confirm this is acceptable before a full re-run.
done <- list.files("external_data/maternités/dist_osrm/")
done <- done[grep(".RData", done, fixed = TRUE)]
done <- stringr::str_extract(done, "(req)([0-9]+)(_)")
done <- gsub("req", "", done)
done <- gsub("_", "", done)
done <- as.numeric(done)
for (mater_id in setdiff(maters_id, done)) {
  print(paste("mater", mater_id))
  one_mat <- NN[mater == mater_id]
  # Ward coordinates as "lon,lat" — OSRM expects longitude first.
  lon_lat_mat <- one_mat[1, c("lon_mater", "lat_mater")] %>% paste(collapse = ",")
  # Guard against 0 chunks (round() gives 0 for wards with < ~25k squares,
  # which would make split() fail on 1:0).
  nb_chunks <- max(1, round(nrow(one_mat) / taille_chunks))
  one_mat_chunks <- split(one_mat[, c("carreau", "lon_carr", "lat_carr")], 1:nb_chunks)
  chunks_id <- names(one_mat_chunks)
  for (chunk_id in chunks_id) {
    print(paste("chunk", chunk_id))
    one_mat_chunk <- one_mat_chunks[[chunk_id]]
    # Destination list "lon,lat;lon,lat;..."; the ward (source) is prepended
    # and flagged via ?sources=0 in the URL.
    lon_lat_carr <- paste(paste(one_mat_chunk$lon_carr, one_mat_chunk$lat_carr, sep = ","), collapse = ";")
    coords <- paste(lon_lat_mat, lon_lat_carr, sep = ";")
    # coords="13.388860,52.517037;13.397634,52.529407;13.428555,52.523219"
    # Public demo endpoint, kept for reference:
    # url <- paste0("http://router.project-osrm.org/table/v1/driving/",coords,"?sources=0")
    url <- paste0("http://10.200.15.24:5000/table/v1/driving/", coords, "?sources=0")
    path <- paste0("external_data/maternités/dist_osrm/req", mater_id, "_", chunk_id, ".json")
    DL_done <- FALSE
    trial <- 1
    while (!DL_done && trial <= 3) {
      print(paste("essai", trial))
      tryCatch({
        download.file(url, destfile = path, mode = "wb", quiet = TRUE)
        routes <- fromJSON(path)
        # durations: 1 x (1 + n) matrix in seconds; convert to minutes.
        durations <- c(t(routes$durations)) / 60
        # carr = 0 marks the source entry (ward to itself).
        dt <- data.table(durations = durations, carr = c(0, one_mat_chunk$carreau))
        DL_done <- TRUE
        save(dt, file = paste0("external_data/maternités/dist_osrm/req", mater_id, "_", chunk_id, ".RData"))
        file.remove(path)
      }, error = function(e) {
        # Leave a marker file per failed attempt; the while-loop retries.
        writeLines("error", paste0("external_data/maternités/dist_osrm/error_", mater_id, "_", chunk_id))
      })
      trial <- trial + 1
    }
  }
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment