This report is automatically generated with the R package knitr (version 1.40).
source("R Functions/functions_QA data.R") ### LOAD DATA ### #FIND AND REPLACE (Ctrl+F) 'MasterFish' WITH MORE APPROPRIATE NAME (e.g., 'CALFED_data') #CHANGE 'FILENAME' & 'SHEETNAME' WITH ACTUAL NAMES MasterFish <- readxl::read_excel('Reeval_Impl_Goals_Linkage_Analysis/Data/Fish/MasterFish.xlsx', sheet='Data', guess_max = 30000) nrow(MasterFish) #number of rows should match the Excel file (minus the header row)
## [1] 12344
### LIST COLUMNS TO BE USED, ADD USER DEFINED COLUMNS, & RENAME COLUMNS TO CEDEN STANDARDS ###
#Use 1.READ ME.xlsx, 'ColumnsForR' to list & identify columns that match corresponding CEDEN Standard columns
keep_cols <- c('SourceID','SourceRow','ProjID','SiteName','Study Station Code','SampleDateTime','Common','Number',
               'Tissue Type','Hg (ppm, Wet Wt) - original','Weight (g)','Length (mm)','SampleID',
               'Water Body Type','LabQualHGppmWetWt')
temp_cols <- c('SiteName-303(d)','Genus','Species') #Include columns that do not match CEDEN standards but may be useful (e.g., Unit columns for MDL & RL)
#temp_cols are removed before the data is merged with other datasets

MasterFish_new <- MasterFish %>%
  select( c(keep_cols, temp_cols) ) %>% #DO NOT CHANGE - selects columns specified above
  rename( #Rename MasterFish columns to CEDEN format here: CEDEN 'COLUMNNAME' = MasterFish 'COLUMNNAME'
    'CitationCode' = 'ProjID',
    'StationName' = 'SiteName',
    'StationCode' = 'Study Station Code',
    'SampleDate' = 'SampleDateTime',
    'CommonName' = 'Common',
    'NumberFishPerComp' = 'Number',
    'TissueName' = 'Tissue Type',
    'Result' = 'Hg (ppm, Wet Wt) - original',
    'WeightAvg g' = 'Weight (g)',
    'TLAvgLength mm' = 'Length (mm)',
    'SampleID' = 'SampleID',
    'WBT' = 'Water Body Type',
    'ResultComments' = 'LabQualHGppmWetWt'
  ) %>%
  mutate(
    #Create missing columns or modify existing columns here: CEDEN COLUMNNAME = 'SPECIFIED VALUE' or FUNCTION
    #DELETE COLUMN NAMES THAT DO NOT NEED TO BE CHANGED
    CoordSystem = 'NAD 1927',
    ProjectCode = CitationCode,
    SampleTime = NA_character_, # SampleDateTime column does not have any time values
    ProgramName = NA_character_,
    ParentProjectName = NA_character_,
    CompositeID = NA_character_,
    TaxonomicName = paste(Genus, Species),
    Method = NA_character_,
    Analyte = 'Mercury, Total',
    Unit = 'mg/Kg ww',
    `TLMin mm` = NA_real_,
    `TLMax mm` = NA_real_,
    CompositeRowID = NA_character_,
    QACode = NA_character_,
    BatchVerification = NA_character_,
    ComplianceCode = NA_character_,
    LabSubmissionCode = NA_character_
  )
nrow(MasterFish_new)
## [1] 12344
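# Optional check (not in the original script): confirm that every column named in
# keep_cols and temp_cols actually exists in the spreadsheet, so a renamed or
# missing header is reported explicitly rather than failing inside the pipeline.
missing_cols <- setdiff(c(keep_cols, temp_cols), names(MasterFish))
if (length(missing_cols) > 0) stop("Columns not found in MasterFish: ",
                                   paste(missing_cols, collapse = ", "))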
# Filter by WTB and to exclude specified sites with pond or lake in name that are not part of rivers or in delta # doing this now to reduce SiteNames and help with linking Lat Long later unique(MasterFish_new$WBT)
## [1] "Rivers/Streams" "Lake, Reservoir" "Hatchery" "Estuary" ## [5] "Wetland" "SF Bay"
MasterFish_new <- MasterFish_new %>%
  filter( StationName == 'Robinson Pond' |
            WBT %in% c('Rivers/Streams','Estuary','Wetland') ) %>%
  mutate(WBT = recode(WBT,
                      'Rivers/Streams' = 'River/Stream',    #recode 'Rivers/Streams' to standard format
                      'Lake, Reservoir' = 'Lake/Reservoir')) #recode 'Lake, Reservoir' to standard format
unique(MasterFish_new$WBT)
## [1] "River/Stream" "Estuary" "Wetland" "Lake/Reservoir"
nrow(MasterFish_new)
## [1] 7984
#str(MasterFish_new) #just to check data class of different columns - e.g., is Date column in POSIX format?
#View(MasterFish_new)

### ADD PROJECTNAME AND LAT LONG COLUMNS FROM METADATA ###
# Copy in ProjID from "Projects" Sheet to create ProjName column #
MF_projects <- readxl::read_excel('Reeval_Impl_Goals_Linkage_Analysis/Data/Fish/MasterFish.xlsx',
                                  sheet='Projects', guess_max = 30000) %>%
  select(ProjID, ProjName) %>%      # Select only the ProjID and ProjName columns
  rename(CitationCode = ProjID,     # rename columns to CEDEN format
         ProjectName = ProjName)

# Join Project Names by ProjID to CitationCode in MasterFish_new
MasterFish_new <- MasterFish_new %>%
  left_join(., MF_projects, by='CitationCode')
nrow(MasterFish_new)
## [1] 7984
# Copy in Latitude & Longitude from "Sites" Sheet to create TargetLatitude & TargetLongitude columns
MF_coords <- readxl::read_excel('Reeval_Impl_Goals_Linkage_Analysis/Data/Fish/MasterFish.xlsx',
                                sheet='Sites with LatLong', guess_max = 30000) %>%
  select(ProjID, SiteName, Latitude, Longitude) %>% # Select only the SiteName, Latitude & Longitude columns & remove SiteNames with no Lat Long info
  rename(CitationCode = ProjID,
         StationName = SiteName,
         TargetLatitude = Latitude,
         TargetLongitude = Longitude)

# Copy Lat/Long from MF_coords to MasterFish_new
# A lot of SiteNames don't match so have to use various methods to link as many sites to Lat/Long as possible
#Using StationName and CitationCode
MasterFish_new <- MasterFish_new %>%
  left_join(., MF_coords, by=c('StationName', 'CitationCode'))
nrow(MasterFish_new)
## [1] 7984
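# Optional check (not in the original script): a left_join can add rows when
# MF_coords holds more than one coordinate record for the same CitationCode and
# StationName pair, so nrow() above should stay at 7984. This lists any
# duplicated pairs so they can be resolved in the 'Sites with LatLong' sheet.
MF_coords %>%
  count(CitationCode, StationName) %>%
  filter(n > 1)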
#Get Sites that still don't have Lat/Long & use SiteName 303(d) name with CitationCode
filtered1 <- MasterFish_new %>%
  filter(., is.na(TargetLatitude)) %>%          #keep only sites without Lat/Long
  select(-TargetLatitude, -TargetLongitude) %>% #remove empty Lat & Long columns so that they can be re-added
  left_join(., MF_coords, by=c('SiteName-303(d)'='StationName', 'CitationCode')) %>%
  filter(., !is.na(TargetLatitude))             # now keep only the sites that have a Lat/Long
nrow(filtered1)
## [1] 54
# Replace Rows in MasterFish_new with new rows from filtered1
MasterFish_new[match(filtered1$SourceRow, MasterFish_new$SourceRow), ] <- filtered1
nrow(MasterFish_new)
## [1] 7984
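# Optional sanity check (not in the original script): the in-place row replacement
# above assumes filtered1 has exactly the same columns, in the same order, as
# MasterFish_new; otherwise values would land in the wrong columns.
stopifnot(identical(names(filtered1), names(MasterFish_new)))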
#Get Sites that still don't have Lat/Long & use SiteName 303(d) name without CitationCode
MF_coords_noCitCode <- MF_coords %>%
  select(-CitationCode) %>% #remove CitationCode from MF_coords so two Citation cols aren't added when left_joining MasterFish and MF_coords
  distinct(StationName, .keep_all=T)

filtered2 <- MasterFish_new %>%
  filter(., is.na(TargetLatitude)) %>%          #keep only sites without Lat/Long
  select(-TargetLatitude, -TargetLongitude) %>% #remove empty Lat & Long columns so that they can be re-added
  left_join(., MF_coords_noCitCode, by=c('SiteName-303(d)'='StationName')) %>%
  filter(., !is.na(TargetLatitude))             # now keep only the sites that have a Lat/Long
nrow(filtered2)
## [1] 1517
# Replace Rows in MasterFish_new with new rows from filtered2
MasterFish_new[match(filtered2$SourceRow, MasterFish_new$SourceRow), ] <- filtered2
nrow(MasterFish_new)
## [1] 7984
#t <- MasterFish_new %>%
#  filter(is.na(TargetLatitude))
#sort(unique(t$StationName))  #104 StationNames without Lat/Long value
#sort(unique(t$CitationCode)) #16 CitationCodes without Lat/Long values - may have values in other sources or may need to look up values from literature
#nrow(t)

### FORMAT COLUMN PARAMETERS ###
# Standardize TissueName Groups - "Fillet" & "Whole Body" #
unique(MasterFish_new$TissueName)
## [1] "Fillet" "Whole Body" "Soft Tissue" "Liver" "Ova" "Gills" ## [7] "Gonads" "Kidney"
nrow(MasterFish_new)
## [1] 7984
MasterFish_new <- MasterFish_new %>%
  filter(TissueName %in% c('Fillet','Whole Body'))
unique(MasterFish_new$TissueName)
## [1] "Fillet" "Whole Body"
nrow(MasterFish_new)
## [1] 7484
# Standardize ResultQualCode Groups - "ND", "DNQ", NA #
unique(MasterFish_new$ResultComments) #Use Result Comments to create ResultQualCode column
## [1] "m=total?" ## [2] NA ## [3] "ave of duplicates, 0.243 and 0.238" ## [4] "ave of duplicates, 0.051 and 0.048" ## [5] "ave of duplicates, 0.109 and 0.106" ## [6] "< Reporting Limit (0.02)" ## [7] "\"none\"" ## [8] "BRL = detectable concentration below reporting limit (RL = 0.147 ug/g dw; 0.0282 ug/g ww; detection limit = 0.025 ug/g dw)" ## [9] "<RL (RL=3.14 ng/g, 0.00314 ppm)" ## [10] "I=absorbance below lowest standard, Data was evaluated and determined to be Valid" ## [11] "none" ## [12] "ND=non detect" ## [13] "average of duplicates, 2.00 and 1.45 ppm" ## [14] "average of duplicates, 1.77 and 1.77 ppm" ## [15] "weight=total?" ## [16] "<DL, 0.0386" ## [17] "<0.007" ## [18] "average of duplicates, 1.1 and 1.0 ppm" ## [19] "<.0314"
#see if the below grepl logic only captures the average of duplicates so we can ignore those for now
unique(MasterFish_new$ResultComments[grepl("ave",MasterFish_new$ResultComments)])
## [1] "ave of duplicates, 0.243 and 0.238" "ave of duplicates, 0.051 and 0.048" ## [3] "ave of duplicates, 0.109 and 0.106" "average of duplicates, 2.00 and 1.45 ppm" ## [5] "average of duplicates, 1.77 and 1.77 ppm" "average of duplicates, 1.1 and 1.0 ppm"
#it worked, so use inverse code to just look at result comments that are not average of duplicates
unique(MasterFish_new$ResultComments[!grepl("ave",MasterFish_new$ResultComments)])
## [1] "m=total?" ## [2] NA ## [3] "< Reporting Limit (0.02)" ## [4] "\"none\"" ## [5] "BRL = detectable concentration below reporting limit (RL = 0.147 ug/g dw; 0.0282 ug/g ww; detection limit = 0.025 ug/g dw)" ## [6] "<RL (RL=3.14 ng/g, 0.00314 ppm)" ## [7] "I=absorbance below lowest standard, Data was evaluated and determined to be Valid" ## [8] "none" ## [9] "ND=non detect" ## [10] "weight=total?" ## [11] "<DL, 0.0386" ## [12] "<0.007" ## [13] "<.0314"
#NDs: "< Reporting Limit (0.02)"; "< DL of 0.003"; "ND=non detect"; "<DL, 0.0386"; "<0.007"; "<RL (RL=3.14 ng/g, 0.00314 ppm)"; "<.0314" #DNQs: "BRL = detectable concentration below reporting limit..."; # RL & DL units seem to be in ppm, which equals mg/Kg MasterFish_new <- MasterFish_new %>% mutate(ResultQualCode = case_when(grepl('BRL',ResultComments) ~ 'DNQ', grepl('<|ND',ResultComments) ~ 'ND'), Result = if_else(grepl('DNQ|ND',ResultQualCode), NA_real_, Result), MDL = case_when(ResultQualCode == 'DNQ' ~ 0.025, grepl('<RL', ResultComments) ~ 0.00314, grepl('ND=non detect', ResultComments) ~ NA_real_, ResultQualCode == 'ND' & !grepl('<RL',ResultComments) & ResultComments != 'ND=non detect' ~ suppressWarnings(as.numeric(sub('.*?(((\\d\\.)|\\.)\\d+).*','\\1',ResultComments)))), RL = if_else(ResultQualCode == 'DNQ', 0.147, NA_real_)) unique(MasterFish_new$ResultQualCode) #New naming structure for ResultQualCode Groupings
## [1] NA "ND" "DNQ"
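# Worked example (not in the original script) of the MDL-extraction regex used
# above: it pulls the first decimal number (e.g., a detection limit) out of a
# "<..."-style result comment. ex_comments is a made-up vector for illustration.
ex_comments <- c("<DL, 0.0386", "<0.007", "<.0314")
as.numeric(sub('.*?(((\\d\\.)|\\.)\\d+).*', '\\1', ex_comments)) # should give 0.0386, 0.007, 0.0314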
# Format Result Column to Numeric #
is.numeric(MasterFish_new$Result)
## [1] TRUE
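# Not part of the original script: if Result had been read in as character
# (i.e., is.numeric() above returned FALSE), a hedged coercion step could be:
if (!is.numeric(MasterFish_new$Result)) {
  MasterFish_new <- MasterFish_new %>%
    mutate(Result = suppressWarnings(as.numeric(Result))) #non-numeric strings become NA
}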
#[1] TRUE - no changes necessary; MDL & RL columns are also numeric

# Check if Result, MDL, & RL Columns all equal <NA> or 0 - these rows have no useful information
nrow(MasterFish_new) #Number rows before
## [1] 7484
#CODE BELOW REQUIRES USER TROUBLESHOOTING DEPENDING ON AVAILABLE COLUMNS AND SPREADSHEET SPECIFIC CONDITIONS#
MasterFish_new <- MasterFish_new %>% #Set 0 & negative values as blank
  mutate(Result = ifelse(Result <= 0, NA_real_, Result),
         MDL = ifelse(MDL <= 0, NA_real_, MDL),
         RL = ifelse(RL <= 0, NA_real_, RL))

na_results <- MasterFish_new %>% #Record rows where Result, MDL, & RL all equal <NA>
  filter( is.na(Result) & is.na(MDL) & is.na(RL) )
nrow(na_results)
## [1] 155
MasterFish_new <- anti_join(MasterFish_new, na_results, by='SourceRow') #returns rows from MasterFish_new not matching values in na_results
nrow(MasterFish_new) #Number rows after
## [1] 7329
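# Optional consistency check (not in the original script): the anti_join should
# have removed exactly the rows captured in na_results (7484 - 155 = 7329).
stopifnot(nrow(MasterFish_new) + nrow(na_results) == 7484)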
# Format Units Column - "mg/Kg ww" #
unique(MasterFish_new$Unit) #Identifies OLDNAMES
## [1] "mg/Kg ww"
# [1] "mg/Kg ww" - no changes necessary # Format Date and Time Column # is.character(MasterFish_new$SampleDate)
## [1] TRUE
# [1] TRUE - need to convert incomplete dates (i.e., only year listed) to full date
unique(MasterFish_new$SampleDate)
## [1] "1970" "37798" "37791" "37790" ## [5] "38882" "39567" "39568" "38657" ## [9] "29828" "38559" "38847" "30267" ## [13] "30561" "28701" "2003" "2002" ## [17] "1999" "29410" "34278" "38643" ## [21] "1998" "2000" "33179" "2001" ## [25] "30211" "29116" "32009" "38623" ## [29] "33527" "32381" "38723" "1971" ## [33] "25797" "1997" "40128" "40127" ## [37] "39364" "34390" "39357" "34395" ## [41] "39371" "39769" "29875" "30244" ## [45] "32400" "34292" "34323" "30196" ## [49] "38232" "38156" "38197" "38153" ## [53] "38916" "31187" "35775" "31996" ## [57] "35776" "31593" "36972" "36937" ## [61] "37014" "36942" "37084" "37126" ## [65] "36655" "36599" "36844" "36654" ## [69] "37055" "37049" "36739" "36852" ## [73] "36398" "36441" "30209" "38692" ## [77] "36426" "34985" "36404" "36405" ## [81] "32455" "Spring 1998" "38944" "32849" ## [85] "37929" "36859" "36860" "34973" ## [89] "36868" "37112" "May 14-18, 2001" "29873" ## [93] "29460" "36858" "36861" "28731" ## [97] "36573" "36857" "36840" "36668" ## [101] "32430" "37026" "36745" "36670" ## [105] "36866" "32784" "30245" "29164" ## [109] "36882" "37894" "36581" "34279" ## [113] "32440" "38580" "36384" "36381" ## [117] "38915" "38589" "29517" "29790" ## [121] "32409" "32045" "32402" "37565" ## [125] "30159" "38931" "32393" "31617" ## [129] "33178" "32010" "37019" "36595" ## [133] "37021" "36742" "36663" "36836" ## [137] "36385" "36396" "36439" "9/7-8/99" ## [141] "36088" "36447" "36445" "36431" ## [145] "36349" "36117" "36446" "36461" ## [149] "36307" "36459" "38930" "39002" ## [153] "39003" "30958" "26045" "36111" ## [157] "36119" "36097" "36419" "36131" ## [161] "36473" "36486" "36468" "36130" ## [165] "36440" "37096" "38602" "38621" ## [169] "38596" "38607" "38595" "38622" ## [173] "38560" "38603" "37083" "31344" ## [177] "38637" "38567" "38565" "34289" ## [181] "38558" "38566" "29854" "38601" ## [185] "32049" "31705" "31275" "31645" ## [189] "30215" "38594" "38672" "32449" ## [193] "31701" "33541" "31372" "34290" ## [197] "32129" "30607" "29124" "30901" ## [201] "30224" "38636" "37557" "39301" ## [205] "36460" "36503" "36457" "36098" ## [209] "36102" "36334" "36487" "36427" ## [213] "33532" "36420" "36322" "36090" ## [217] "36338" "36075" "36474" "37145" ## [221] "37097" "25807" "36430" "32126" ## [225] "32100" "37068" "37095" "39300" ## [229] "39370" "39391" "38673" "36025" ## [233] "36425" "36424" "33183" "32378" ## [237] "36467" "36423" "36465" "36118" ## [241] "36450" "36453" "37213" "36504" ## [245] "36496" "36448" "37082" "32104" ## [249] "30960" "26031" "36502" "36767" ## [253] "36768" "36872" "36873" "37210" ## [257] "39022" "39029" "1973" "39030" ## [261] "39057" "25764" "37140" "33912" ## [265] "29536" "36033" "28710" "33522" ## [269] "29760" "29412" "33563" "31713" ## [273] "36041" "31280" "28726" "29075" ## [277] "36095" "31573" "30596" "31643" ## [281] "30910" "39023" "32834" "33171" ## [285] "34275" "32052" "33135" "30879" ## [289] "34633" "38883" "33520" "28739" ## [293] "30223" "34268" "38846" "38078" ## [297] "37222" "38079" "33547" "29117" ## [301] "32014" "29454" "30946" "29881" ## [305] "30544" "34600" "34599" "37867" ## [309] "38213" "37872" "34496" "37594" ## [313] "34634" "37581" "37595" "37868" ## [317] "37833" "37788" "37789" "37893" ## [321] "37873" "32484" "30622" "36433" ## [325] "36407" "34604" "29151" "29354" ## [329] "36865" "28744" "29152" "29355" ## [333] "37201" "29356" "31554" "32373" ## [337] "1996" "March 1995" NA "28723" ## [341] "29515" "32058" "30881" "31300" ## [345] "38650" "38617" "35388" "36103" ## 
[349] "36673" "31757" "37202" "32420" ## [353] "30589" "38645" "29850" "29752" ## [357] "30624" "29467" "36104" "29153" ## [361] "28746" "38868" "36402" "36382" ## [365] "38608" "29868" "29475" "29123" ## [369] "28684" "32086" "35558" "31337" ## [373] "36146" "34598" "36413" "36108" ## [377] "36510" "36507" "36508" "37139" ## [381] "36412" "36150" "36769" "36874" ## [385] "36875" "37067" "1997-1998" "Fall 1998" ## [389] "29018" "28654" "29783" "30154" ## [393] "37728" "37930" "38917" "30172" ## [397] "29502" "29782" "34256" "33150" ## [401] "30582" "32413" "35396" "32770" ## [405] "35787" "36129" "31666" "33904" ## [409] "37587" "36516" "37783" "37908" ## [413] "38582" "25994" "37909" "38932" ## [417] "26001" "38656" "29797" "38923" ## [421] "37558" "37512" "37910" "38581" ## [425] "38579" "37559" "39056" "31295" ## [429] "32840" "32133" "38628" "36451" ## [433] "38629" "36458" "36049" "38676" ## [437] "38675" "38674" "37208" "36850" ## [441] "34603" "29853" "30217" "29473" ## [445] "28709" "31182" "31615" "32441" ## [449] "38644" "29855" "29158" "28711" ## [453] "30216" "29472" "30560" "34319" ## [457] "32113" "29437" "29118" "28752" ## [461] "34263" "34243" "34417" "33526" ## [465] "34996" "34285" "36432" "1972"
MasterFish_new <- MasterFish_new %>%
  filter(!is.na(SampleDate)) %>% # remove the 1 sample that does not have a date - R5MF7868 - Marsh Creek/1 mile above Dunn Ck Confluence
  mutate(
    SampleDate = recode(SampleDate,
                        '1970' = '25569',            #excel value for 1/1/1970
                        '1971' = '25934',            #excel value for 1/1/1971
                        '1972' = '26299',            #excel value for 1/1/1972
                        '1973' = '26665',            #excel value for 1/1/1973
                        '1996' = '35065',            #excel value for 1/1/1996
                        '1997' = '35431',            #excel value for 1/1/1997
                        '1998' = '35796',            #excel value for 1/1/1998
                        '1999' = '36161',            #excel value for 1/1/1999
                        '2000' = '36526',            #excel value for 1/1/2000
                        '2001' = '36892',            #excel value for 1/1/2001
                        '2002' = '37257',            #excel value for 1/1/2002
                        '2003' = '37622',            #excel value for 1/1/2003
                        '1997-1998' = '35796',       #excel value for 1/1/1998
                        '9/7-8/99' = '36410',        #excel value for 9/7/1999
                        'Fall 1998' = '36100',       #excel value for 11/1/1998
                        'March 1995' = '34759',      #excel value for 3/1/1995
                        'May 14-18, 2001' = '37025', #excel value for 5/14/2001
                        'Spring 1998' = '35886'      #excel value for 4/1/1998
    ),
    SampleDate = format(as.Date(as.numeric(SampleDate), origin = "1899-12-30"), '%Y-%m-%d'), #convert Excel numeric date to actual date

    #From Template Code
    #If SampleDate & SampleTime are not in character format by default, convert them to character class so they export better
    SampleDate = ifelse(sapply(SampleDate, is.character), SampleDate, as.character(as.Date(SampleDate))),
    SampleTime = ifelse(sapply(SampleTime, is.character), SampleTime, format(lubridate::ymd_hms(SampleTime), "%H:%M:%S")),

    #COMBINE DATE AND TIME INTO SampleDateTime COLUMN
    SampleDateTime = ifelse(!is.na(SampleTime), paste(SampleDate, SampleTime), paste(SampleDate, '00:00:00')),

    #FORMAT SampleDateTime COLUMN TO DATE FORMAT
    SampleDateTime = lubridate::ymd_hms(SampleDateTime)
  )
nrow(MasterFish_new)
## [1] 7328
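# Quick illustration (not in the original script) of the Excel serial-date
# conversion used above: Excel's 1900 date system counts days from 1899-12-30.
as.Date(25569, origin = "1899-12-30") # "1970-01-01"
as.Date(36526, origin = "1899-12-30") # "2000-01-01"
as.Date(37798, origin = "1899-12-30") # "2003-06-26"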
### REMOVE TEMPORARY COLUMNS ###
MasterFish_new <- MasterFish_new %>%
  select(-one_of(temp_cols)) #Remove temp columns since they are no longer needed
#View(MasterFish_new)

### SAVE FORMATTED DATA AS EXCEL FILE ###
writexl::write_xlsx(MasterFish_new, path='Reeval_Impl_Goals_Linkage_Analysis/Data/Fish/MasterFish_ceden_format.xlsx')

# In excel, to convert SampleDate column to Date format
# 1 - Select the date column.
# 2 - Go to the Data-tab and choose "Text to Columns".
# 3 - On the first screen, leave radio button on "delimited" and click Next.
# 4 - Unselect any delimiter boxes (everything blank) and click Next.
# 5 - Under column data format choose Date, select YMD
# 6 - Click Finish.
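# Optional verification (not in the original script): read the exported file back
# in and confirm the row count survived the round trip.
check <- readxl::read_excel('Reeval_Impl_Goals_Linkage_Analysis/Data/Fish/MasterFish_ceden_format.xlsx',
                            guess_max = 30000)
nrow(check) == nrow(MasterFish_new) #should be TRUE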
The R session information (including the OS info, R version and all packages used):
sessionInfo()
## R version 4.2.2 (2022-10-31 ucrt)
## Platform: x86_64-w64-mingw32/x64 (64-bit)
## Running under: Windows 10 x64 (build 22621)
##
## Matrix products: default
##
## locale:
## [1] LC_COLLATE=English_United States.utf8  LC_CTYPE=English_United States.utf8
## [3] LC_MONETARY=English_United States.utf8 LC_NUMERIC=C
## [5] LC_TIME=English_United States.utf8
##
## attached base packages:
## [1] stats     graphics  grDevices utils     datasets  methods   base
##
## other attached packages:
## [1] lubridate_1.8.0    plotly_4.10.0      readxl_1.4.1       actuar_3.3-0
## [5] NADA_1.6-1.1       forcats_0.5.2      stringr_1.4.1      dplyr_1.0.9
## [9] purrr_0.3.4        readr_2.1.2        tidyr_1.2.0        tibble_3.1.8
## [13] ggplot2_3.3.6      tidyverse_1.3.2    fitdistrplus_1.1-8 survival_3.4-0
## [17] MASS_7.3-58.1
##
## loaded via a namespace (and not attached):
## [1] lattice_0.20-45     assertthat_0.2.1    digest_0.6.29       utf8_1.2.2
## [5] R6_2.5.1            cellranger_1.1.0    backports_1.4.1     reprex_2.0.2
## [9] evaluate_0.16       highr_0.9           httr_1.4.4          pillar_1.8.1
## [13] rlang_1.0.5         lazyeval_0.2.2      googlesheets4_1.0.1 rstudioapi_0.14
## [17] data.table_1.14.2   Matrix_1.5-1        splines_4.2.2       googledrive_2.0.0
## [21] htmlwidgets_1.5.4   munsell_0.5.0       broom_1.0.1         compiler_4.2.2
## [25] modelr_0.1.9        xfun_0.32           pkgconfig_2.0.3     htmltools_0.5.3
## [29] tidyselect_1.1.2    fansi_1.0.3         viridisLite_0.4.1   crayon_1.5.1
## [33] tzdb_0.3.0          dbplyr_2.2.1        withr_2.5.0         grid_4.2.2
## [37] jsonlite_1.8.0      gtable_0.3.1        lifecycle_1.0.1     DBI_1.1.3
## [41] magrittr_2.0.3      scales_1.2.1        writexl_1.4.0       cli_3.3.0
## [45] stringi_1.7.8       fs_1.5.2            xml2_1.3.3          ellipsis_0.3.2
## [49] generics_0.1.3      vctrs_0.4.1         expint_0.1-7        tools_4.2.2
## [53] glue_1.6.2          hms_1.1.2           fastmap_1.1.0       colorspace_2.0-3
## [57] gargle_1.2.0        rvest_1.0.3         knitr_1.40          haven_2.5.1
Sys.time()
## [1] "2024-01-05 10:35:18 PST"