
Commit

update prep_health_facilities
rafapereirabr committed Mar 20, 2024
1 parent 59d2964 commit 9887a92
Showing 4 changed files with 61 additions and 199 deletions.
39 changes: 24 additions & 15 deletions data_prep/R/health_facilities.R
@@ -9,9 +9,9 @@

update_health_facilities <- function(){

- #' source:
- #' https://dados.gov.br/dados/conjuntos-dados/cnes-cadastro-nacional-de-estabelecimentos-de-saude
- file_url = 'https://s3.sa-east-1.amazonaws.com/ckan.saude.gov.br/CNES/cnes_estabelecimentos.zip'
+ #' #' source:
+ #' #' https://dados.gov.br/dados/conjuntos-dados/cnes-cadastro-nacional-de-estabelecimentos-de-saude
+ #' file_url = 'https://s3.sa-east-1.amazonaws.com/ckan.saude.gov.br/CNES/cnes_estabelecimentos.zip'

# determine date of last update
caminho_api <- "https://dados.gov.br/api/publico/conjuntos-dados/cnes-cadastro-nacional-de-estabelecimentos-de-saude"
@@ -26,8 +26,13 @@ update_health_facilities <- function(){
date_update <- gsub("-", "", date_update)
year_update <- substring(date_update, 1, 4)


+ # date shown to geobr user
+ geobr_date <- substr(date_update, 1, 6)


# download file to tempdir
- temp_local_file <- download_file(file_url = file_url)
+ temp_local_file <- download_file(file_url = meta$url)

# unzip file to tempdir
temp_local_dir <- tempdir()
@@ -55,10 +60,11 @@ update_health_facilities <- function(){

# fix code_muni to 7 digits
muni <- geobr::read_municipality(code_muni = 'all', year = as.numeric(year_update) - 1)
- code7 <- data.table(code_muni = muni$code_muni)
- code7[, code_muni6 := as.numeric(substring(code_muni, 1, 6))]
+ data.table::setDT(muni)
+ muni[, code_muni6 := as.numeric(substring(code_muni, 1, 6))]
+ muni <- muni[, .(code_muni6, code_muni)]

- dt[code7, on = 'code_muni6', code_muni := i.code_muni]
+ dt[muni, on = 'code_muni6', code_muni := i.code_muni]
dt[, code_muni6 := NULL]

# add state and region
@@ -83,12 +89,15 @@ update_health_facilities <- function(){
# dt[is.na(lat) | is.na(lon),]
# dt[lat==0,]

- # replace NAs with 0
- data.table::setnafill(dt,
- type = "const",
- fill = 0,
- cols=c("lat","lon")
- )
+ # dt[code_cnes=='0000930', lat]
+ # dt[code_cnes=='0000930', lon]
+ #
+ # # replace NAs with 0
+ # data.table::setnafill(dt,
+ # type = "const",
+ # fill = 0,
+ # cols=c("lat","lon")
+ # )



@@ -103,12 +112,12 @@ update_health_facilities <- function(){


# create folder to save the data
- dest_dir <- paste0('./data/health_facilities/')
+ dest_dir <- paste0('./data/health_facilities/', geobr_date)
dir.create(path = dest_dir, recursive = TRUE, showWarnings = FALSE)


# Save raw file in sf format
- sf::st_write(cnes_sf,
+ sf::st_write(temp_sf,
dsn= paste0(dest_dir, 'cnes_', date_update,".gpkg"),
overwrite = TRUE,
append = FALSE,
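
The code_muni fix above (hunk @@ -55,10 +60,11 @@) drops the intermediate code7 table and joins the full 7-digit municipality code directly from the geobr municipality table. A minimal, self-contained sketch of that data.table update join, using hypothetical toy values rather than real CNES records:

library(data.table)

# toy facilities table keyed by a 6-digit municipality code (hypothetical values)
dt <- data.table(code_cnes  = c("0000001", "0000002"),
                 code_muni6 = c(110001, 110002))

# toy municipality lookup carrying the full 7-digit code
muni <- data.table(code_muni = c(1100015, 1100023))
muni[, code_muni6 := as.numeric(substring(code_muni, 1, 6))]
muni <- muni[, .(code_muni6, code_muni)]

# update join: copy the 7-digit code into dt, then drop the 6-digit key
dt[muni, on = 'code_muni6', code_muni := i.code_muni]
dt[, code_muni6 := NULL]
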
183 changes: 0 additions & 183 deletions r-package/prep_data/prep_health_facilities.R

This file was deleted.

4 changes: 3 additions & 1 deletion r-package/prep_data/prep_schools.R
@@ -1,5 +1,7 @@
#> DATASET: schools 2020
- #> Source: INEP - http://portal.inep.gov.br/web/guest/dados/catalogo-de-escolas
+ #> Source: INEP -
+ #> https://www.gov.br/inep/pt-br/acesso-a-informacao/dados-abertos/inep-data/catalogo-de-escolas
+ #>
#: scale
#> Metadata:
# Titulo: schools
34 changes: 34 additions & 0 deletions r-package/tests/tests_rafa/long_term_cache.R
@@ -0,0 +1,34 @@
# long-term cache

library(httr)
am <- 'https://github.com/ipeaGIT/geobr/releases/download/v1.7.0/amazonia_legal.gpkg'
as <- 'https://github.com/ipeaGIT/geobr/releases/download/v1.7.0/amazonia_legal_simplified.gpkg'



eee <- HEAD(url = am)
eee$headers$etag
eee$all_headers

HEAD(url = am)$headers$etag

# get all etags for metadata table
aaa <- lapply(X = c(am, as), FUN = function(x){HEAD(url = x)$headers$etag})
unlist(aaa)


f <- 'C:/Users/user/Downloads/amazonia_legal (2).gpkg'

file.info(f)$ctime

fff <- HEAD(url = f)



#' metadata table should have url and etag

#' DOWNLOAD fun
#' 1) if etag is not saved locally,
#'    the download fun should save the etag locally and download the data
#' 2) if etag and data exist locally, then compare against metadata
#
