Skip to content

Commit

Permalink
updated ftp.R name in gitignore, added RCurl to dependencies, removed…
Browse files Browse the repository at this point in the history
… "googledrive::as_id" from run file, but put it into functions instead.

changed current_ to current_daily that will be transferred to FileZilla (no anoms anymore)
  • Loading branch information
Liz.Dawson authored and Liz.Dawson committed Jul 6, 2022
1 parent 92f78d0 commit e5b0d9c
Show file tree
Hide file tree
Showing 3 changed files with 21 additions and 21 deletions.
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -35,5 +35,5 @@ vignettes/*.pdf
ConnectToOracle.R
*.log
/shapefiles/archive
/code/ftp_login.R
/code/ftp.R

6 changes: 4 additions & 2 deletions code/functions.R
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ PKG <- c(
"janitor",

# "tinytex",
"RCurl",

# tidyverse,
"broom",
Expand Down Expand Up @@ -327,7 +328,7 @@ make_varplot_wrapper <- function(maxyr,
# path = dir_googledrive_upload,
# overwrite = FALSE)

dir_googledrive_upload0 <- googledrive::drive_ls(path = dir_googledrive_upload) %>%
dir_googledrive_upload0 <- googledrive::drive_ls(path = googledrive::as_id(dir_googledrive_upload)) %>%
dplyr::filter(name == "anom") %>%
dplyr::select("id") %>%
unlist() %>%
Expand Down Expand Up @@ -1235,7 +1236,8 @@ make_figure <- function(
for (iii in 1:length(temp)) {
drive_upload(
media = temp[iii],
path = ifelse(class(dir_googledrive_upload)[1] == "drive_id", dir_googledrive_upload, googledrive::as_id(dir_googledrive_upload)),
path = googledrive::as_id(dir_googledrive_upload),
#path = ifelse(class(dir_googledrive_upload)[1] == "drive_id", dir_googledrive_upload, googledrive::as_id(dir_googledrive_upload)),
overwrite = TRUE)
}
}
Expand Down
34 changes: 16 additions & 18 deletions code/run.R
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ googledrive_dl <- TRUE
dir_googledrive_log <- "https://docs.google.com/spreadsheets/d/16CJA6hKOcN1a3QNpSu3d2nTGmrmBeCdmmBCcQlLVqrE/edit#gid=315914502"
dir_googledrive_upload_bs = "https://drive.google.com/drive/folders/1vWza36Dog0SpZLcTN22wD-iCEn6ooGCM"
dir_googledrive_upload_ai = "https://drive.google.com/drive/folders/1SeNOAh5-muQ2BDgOHWZWwYIoLl68DHWX"
dir_googledrive_upload_test = "https://drive.google.com/drive/folders/1rsR0aFfFzrspTBFU48Bb26EJvdhIZSpl"
#dir_googledrive_upload_test = "https://drive.google.com/drive/folders/1rsR0aFfFzrspTBFU48Bb26EJvdhIZSpl"

# The surveys this script will be covering
dat_survreg <- data.frame(reg_shapefile = "EBS_SHELF",
Expand Down Expand Up @@ -72,16 +72,15 @@ source(file = paste0(dir_wd,"code/functions.R"))
# source(file = paste0(dir_wd, "code/data_dl.R")) # you don't unnecessarily run this each time
source(file = paste0(dir_wd, "code/data.R"))
if (googledrive_dl == TRUE) {
source(file = paste0(dir_wd, "code/ftp_login.R")) # removed in gitignore - ask for premission
source(file = paste0(dir_wd, "code/ftp.R")) # removed in gitignore - ask for permission
}

# Map --------------------------------------------------------------------------
# ## AI --------------------------------------------------------------------------
SRVY <- "AI"
region_akgfmaps = "ai"
plot_subtitle = "NOAA Fisheries Aleutian Islands Bottom Trawl Survey"
dir_googledrive_upload <- googledrive::as_id(dir_googledrive_upload_ai)
dir_googledrive_upload <- googledrive::as_id(dir_googledrive_upload_test)
dir_googledrive_upload <- (dir_googledrive_upload_ai)
plot_anom <- FALSE
show_planned_stations <- FALSE
survey_area <- akgfmaps::get_base_layers(select.region = region_akgfmaps, set.crs = "auto")
Expand Down Expand Up @@ -152,9 +151,9 @@ make_varplot_wrapper(maxyr = maxyr, # Daily plot
# dir_wd = dir_wd)

## send all current files to the FTP -------------------------------------------
# vars here defined in ftp_login.R
# vars here defined in ftp.R
dir_out <- paste0(getwd(),"/output/",maxyr,"_",SRVY,"/")
temp <- list.files(path = dir_out, pattern = "current_", full.names = FALSE)
temp <- list.files(path = dir_out, pattern = "current_daily", full.names = FALSE)
dest <- dev_bs

for (iiii in 1:length(temp)) {
Expand All @@ -168,26 +167,25 @@ for (iiii in 1:length(temp)) {
.opts=curlOptions(verbose=TRUE))
}


# NBS + EBS Maps --------------------------------------------------------------

SRVY <- "BS"
region_akgfmaps = "bs.all"
plot_subtitle <- "NOAA Fisheries Bering Sea Bottom Trawl Survey"
dir_googledrive_upload <- googledrive::as_id(dir_googledrive_upload_bs)
# dir_googledrive_upload <- googledrive::as_id(dir_googledrive_upload_test)
dir_googledrive_upload <- (dir_googledrive_upload_bs)
survey_area <- akgfmaps::get_base_layers(select.region = region_akgfmaps, set.crs = "auto")
show_planned_stations <- TRUE
plot_anom <- TRUE
survey_area$survey.grid <- survey_area$survey.grid %>%
survey_area$survey.grid <- survey_area$survey.grid %>%
sf::st_transform(x = ., survey_area$crs$input) %>%
dplyr::rename(station = STATIONID) %>%
sp::merge(x = .,
sp::merge(x = .,
y = haul %>%
dplyr::rename(station = stationid) %>%
dplyr::select(station, stratum) %>%
dplyr::distinct(),
all.x = TRUE) %>%
dplyr::rename(station = stationid) %>%
dplyr::select(station, stratum) %>%
dplyr::distinct(),
all.x = TRUE) %>%
dplyr::mutate(region = "Bering Sea")
survey_area$place.labels$y[survey_area$place.labels$lab == "200 m"] <- -60032.7

Expand Down Expand Up @@ -234,9 +232,9 @@ make_varplot_wrapper(maxyr = maxyr, # Daily plot
# dir_wd = dir_wd)

## send all current files to the FTP -------------------------------------------
# vars here defined in ftp_login.R
# vars here defined in ftp.R
dir_out <- paste0(getwd(),"/output/",maxyr,"_",SRVY,"/")
temp <- list.files(path = dir_out, pattern = "current_", full.names = FALSE)
temp <- list.files(path = dir_out, pattern = "current_daily", full.names = FALSE)
dest <- dev_bs

for (iiii in 1:length(temp)) {
Expand All @@ -250,4 +248,4 @@ for (iiii in 1:length(temp)) {
.opts=curlOptions(verbose=TRUE))
}

# sink()
# # sink()

0 comments on commit e5b0d9c

Please sign in to comment.