#manual reaeration.R (forked from adelvecchia/NEON-reaeration)
###manual running of sites
manual.reaeration <- function(
inputFile = NULL,
loggerData = NULL,
namedLocation = "namedLocation",
injectionTypeName = "injectionType",
eventID = "eventID",
stationToInjectionDistance = "stationToInjectionDistance",
plateauGasConc = "plateauGasConc",
corrPlatSaltConc = "corrPlatSaltConc",
hoboSampleID = "hoboSampleID",
discharge = "fieldDischarge",
waterTemp = "waterTemp",
wettedWidth = "wettedWidth",
plot = TRUE,
savePlotPath = NULL,
processingInfo = NULL,
eventsindexstart = NULL,
eventsindexend = NULL
){
if(!plot && !is.null(savePlotPath)){
stop("Please turn plotting on (plot = T) in order to save plots.")
}
namLocIdx <- which(names(inputFile) == namedLocation)
injTypeIdx <- which(names(inputFile) == injectionTypeName)
eventIDIdx <- which(names(inputFile) == eventID)
staDistIdx <- which(names(inputFile) == stationToInjectionDistance)
plGasIdx <- which(names(inputFile) == plateauGasConc)
plSaltIdx <- which(names(inputFile) == corrPlatSaltConc)
loggerIdx <- which(names(inputFile) == hoboSampleID)
QIdx <- which(names(inputFile) == discharge)
watTempIdx <- which(names(inputFile) == waterTemp)
wwIdx <- which(names(inputFile) == wettedWidth)
##### Constants #####
#Coefficients for Least Squares Third-Order Polynomial Fits of Schmidt Number Versus Temperature
#Valid for 0 - 30 Celsius temperature range
#Table A1, Fresh water, Wanninkhof (1992), DOI: 10.1029/92JC00188
#See also Jahne et al. (1987), DOI: 10.4236/jep.2014.511103
A_O2 = 1800.6
B_O2 = 120.10
C_O2 = 3.7818
D_O2 = 0.047608
A_CO2 = 1911.1
B_CO2 = 118.11
C_CO2 = 3.4527
D_CO2 = 0.041320
A_SF6 = 3255.3
B_SF6 = 217.13
C_SF6 = 6.8370
D_SF6 = 0.086070
Sc_CO2 = 600 #Schmidt number of CO2 at 20 C in fresh water (the k600 reference)
convLpsCms = 1/1000 #Conversion from litersPerSecond to cubicMetersPerSecond
#Reaeration Rate Conversion
#Equation 7, Wanninkhof (1990), DOI: 10.1029/WR026i007p01621
Sc_O2_25 <- A_O2 - B_O2 * 25 + C_O2 * 25^2 - D_O2 * 25^3
Sc_SF6_25 <- A_SF6 - B_SF6 * 25 + C_SF6 * 25^2 - D_SF6 * 25^3
reaRateConv <- (Sc_O2_25/Sc_SF6_25) ^ (-0.5)
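#Worked check (added for reference; not part of the original calculation, values rounded):
#Sc_O2_25  = 1800.6 - 120.10*25 + 3.7818*25^2 - 0.047608*25^3 ~= 417.9
#Sc_SF6_25 = 3255.3 - 217.13*25 + 6.8370*25^2 - 0.086070*25^3 ~= 755.3
#so reaRateConv = (417.9/755.3)^(-0.5) ~= 1.34, i.e. the measured SF6 exchange rate is
#scaled up by roughly a third to estimate the O2 exchange rate.
#stopifnot(abs(reaRateConv - 1.34) < 0.01)   #uncomment as a quick sanity check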
#Create output file
outputDFNames <- c(
'siteID',
'injType',
'startDate',
'eventID',
'bgdsalt',
'cvbgdSalt',
'lossRateSF6',
'lossRateSF6_clip',
'S1PeakTime',
'S4PeakTime',
'peakMaxTravelTime',
'S1CentroidTime',
'S4CentroidTime',
'centroidTravelTime',
'S1NominalTime',
'S4NominalTime',
'nominalTravelTime',
'S1HarmonicMeanTime',
'S4HarmonicMeanTime',
'harmonicMeanTravelTime',
'releaseDist',
'releasetoPeakTravelTime',
'releasetoCentroidTravelTime',
'releasetoNominalTravelTime',
'btwStaDist',
'peakMaxVelocity',
'releaseVelocity_peak',
'releaseVelocity_nominal',
'releaseVelocity',
'meanDepth',
'Width',
'reaRateO2',
'gasTransVelO2',
'meanQ',
'meanTemp',
'k600',
'K600'
)
#Subset to the requested slice of unique eventIDs
eventIDs <- unique(inputFile$eventID)[eventsindexstart:eventsindexend]
inputFile <- inputFile[inputFile$eventID %in% eventIDs,]
allEventID <- unique(inputFile[[eventIDIdx]])
outputDF <- data.frame(matrix(data=NA, ncol=length(outputDFNames),
nrow=length(allEventID)))
names(outputDF) <- outputDFNames
outputDF$eventID <- allEventID
#Check for correct date format
if(all(grepl("20[0-9]{2}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z",loggerData$dateTimeLogger))){
dateFormat <- "%Y-%m-%dT%H:%M:%SZ"
loggerData$dateTimeLogger <- as.POSIXct(loggerData$dateTimeLogger, format = dateFormat, tz = "UTC")
}else if(all(grepl("20[0-9]-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\\.000\\+0000",loggerData$dateTimeLogger))){
dateFormat <- "%Y-%m-%dT%H:%M:%S.000+0000"
loggerData$dateTimeLogger <- as.POSIXct(loggerData$dateTimeLogger, format = dateFormat, tz = "UTC")
}else if(!"POSIXct" %in% is(loggerData$dateTimeLogger)){
stop("Inconsistent or unidentified date formats in conductivity logger data.")
}
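#Illustrative examples of the two timestamp layouts handled above (example values, shown for reference only):
# "2019-07-16T14:05:00Z"         -> parsed with format "%Y-%m-%dT%H:%M:%SZ"
# "2019-07-16T14:05:00.000+0000" -> parsed with format "%Y-%m-%dT%H:%M:%S.000+0000"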
for(i in seq(along = outputDF$eventID)){
#for(i in 21:22){
modelInjType <- FALSE
currEventID <- outputDF$eventID[i]
#Uncomment this if you'd like to see a list of all the eventIDs for troubleshooting or debugging
#print(paste0(i, " - ", currEventID))
injectionType <- unique(inputFile[inputFile[[eventIDIdx]] == currEventID & !is.na(inputFile[[injTypeIdx]]), injTypeIdx])
outputDF$meanQ[i] <- mean(inputFile[inputFile[[eventIDIdx]] == currEventID, QIdx], na.rm = TRUE)*convLpsCms # m^3 s^-1
if(length(injectionType)<1){
cat("Warning - Injection type unknown for",currEventID,"\n")
next
}
#Record the injection type for this event only; assigning without [i] would overwrite the whole column
outputDF$injType[i] <- injectionType[1]
#Calculations for the "model" slug injections only TBD
#For the moment just skip those
if(injectionType %in% c("model","model - slug","model - CRI")){
print(paste0("Model injection type, cannot calculate loss rate for ", currEventID))
modelInjType <- TRUE
}
#Use the drip or slug time for the experiment start time
slugTime <- unique(inputFile$slugPourTime[inputFile[[eventIDIdx]] == currEventID])
injTime <- unique(inputFile$dripStartTime[inputFile[[eventIDIdx]] == currEventID])
if(is.na(slugTime)){
currExpStartTime <- unique(inputFile$dripStartTime[inputFile[[eventIDIdx]] == currEventID])
}else{
currExpStartTime <- unique(inputFile$slugPourTime[inputFile[[eventIDIdx]] == currEventID])
}
if(is.na(currExpStartTime)){
cat('Warning, experiment startTime could not be determined for',currEventID,'\n')
}
outputDF$siteID[i] <- unique(substr(inputFile[[namLocIdx]][inputFile[[eventIDIdx]] == currEventID], 1, 4))
S1 <- paste(outputDF$siteID[i], "AOS.reaeration.station.01", sep = ".")
S2 <- paste(outputDF$siteID[i], "AOS.reaeration.station.02", sep = ".")
S3 <- paste(outputDF$siteID[i], "AOS.reaeration.station.03", sep = ".")
S4 <- paste(outputDF$siteID[i], "AOS.reaeration.station.04", sep = ".")
#outputDF$Width[i] <- inputFile[inputFile[[namLocIdx]] == S4 & inputFile[[eventIDIdx]] == currEventID, wwIdx] #meters
#try changing that line - 275
outputDF$Width[i] <- mean(inputFile[inputFile[[eventIDIdx]] == currEventID, wwIdx], na.rm=TRUE) #meters
# outputDF$meanDepth[i] <- outputDF$meanQ[i]/(inputFile[inputFile[[namLocIdx]] == S4 & inputFile[[eventIDIdx]] == currEventID, wwIdx]*outputDF$centroidVelocity[i]) # meters
if(!modelInjType){
#Background correct salt samples, normalize gas concentration, and natural log transform the plateau gas concentrations
backSalt <- inputFile$backgroundSaltConc[inputFile$eventID == currEventID]
platSalt <- as.character(inputFile$plateauSaltConc[inputFile$eventID == currEventID])
platGas <- as.character(inputFile$plateauGasConc[inputFile$eventID == currEventID])
statDist <- inputFile$stationToInjectionDistance[inputFile$eventID == currEventID]
#Average the background salt samples across stations for this event
#If a background value is below detection (recorded as NA), substitute 0 before subtracting
#Note: this assumes an NA background value means "measured but below detection", not "missing"
if(any(is.na(backSalt))){
backSalt[is.na(backSalt)] <- 0
}
bgdsalt <- mean(backSalt, na.rm = TRUE)
cvbgdSalt <- sd(backSalt, na.rm = TRUE)/bgdsalt
try(outputDF$bgdsalt[i] <- bgdsalt)
try(outputDF$cvbgdSalt[i] <- cvbgdSalt)
x <- NA
y <- NA
meanY <- NA
for(j in 1:length(statDist)){ #for each station for each experiment
currStart <- (j-1)*5
# currBack <- backSalt[j]
currBack<-bgdsalt
currPlatSalt <- as.numeric(strsplit(platSalt[j],"\\|")[[1]])
currPlatGas <- as.numeric(strsplit(platGas[j],"\\|")[[1]])
#Background correct plateau salt concentrations
corrPlatSalt <- NA
if(length(currPlatSalt)>0 && length(currBack)>0){
corrPlatSalt <- currPlatSalt-currBack
}
#Normalize plateau gas concentration to corrected plateau salt concentration
normPlatGas <- NA
if(length(currPlatGas)>0 && length(corrPlatSalt)>0 && any(!is.na(currPlatGas)) && any(!is.na(corrPlatSalt)) && length(currPlatGas)==length(corrPlatSalt)){
normPlatGas <- currPlatGas/corrPlatSalt
}
if(length(currPlatSalt)<1 || length(currBack)<1 || length(currPlatGas)<1 || all(is.na(currPlatGas)) || all(is.na(corrPlatSalt))){
print(paste0("Tracer data for station ",j,", eventID ",currEventID," not available."))
next
}
#would like to work in storing these error messages and the event IDs in a data frame
if(min(normPlatGas, na.rm = T) <= 0 | min(corrPlatSalt, na.rm = T) <= 0){
print("A gas concentration or background corrected salt concentration is zero or negative producing NaNs for LNgasNormalizedToSalt")
}
#normPlatGas is the plateau gas concentration normalized to the background-corrected salt concentration
normPlatGas[normPlatGas <= 0] <- NA #non-positive values cannot be log-transformed, so set them to NA
corrPlatSalt[corrPlatSalt <= 0] <- NA
logNormPlatGas <- try(log(normPlatGas))
numVals <- min(length(corrPlatSalt),length(normPlatGas))
x[(1+currStart):(numVals+currStart)] <- statDist[j]
y[(1+currStart):(numVals+currStart)] <- logNormPlatGas
meanY[j] <- log(mean(currPlatGas, na.rm = T)/mean(corrPlatSalt, na.rm = T)) #one normalized value per station
}
#close the per station setup
#Calculate the Loss Rate, slope of the salt corrected SF6 over the reach
lineFit <- NA
#Warnings are suppressed for stations with no data
suppressWarnings(try(lineFit <- lsfit(statDist,meanY), silent = T))
if(sum(is.na(lineFit))){
print(paste0("Warning, loss rate could not be determined for ", currEventID))
next
}
#Clean up y for plotting if there are Inf values
x <- x[!is.infinite(y)]
y <- y[!is.infinite(y)]
try(outputDF$lossRateSF6[i] <- lineFit$coefficients[[2]], silent = T)
#Repeat the fit without the station nearest the injection (station 1)
lineFit_clip <- NA
clipdf <- data.frame(statDist = statDist, meanY = meanY)
clipdf <- clipdf[clipdf$statDist != min(clipdf$statDist, na.rm = TRUE),]
# suppressWarnings(try(lineFit_clip <- lsfit(statDist[2:length(statDist)],meanY[2:length(statDist)]), silent = T))
suppressWarnings(try(lineFit_clip <- lsfit(clipdf$statDist, clipdf$meanY), silent = T))
if(sum(is.na(lineFit_clip))){
print(paste0("Warning, loss rate could not be determined without Stn 1 for ", currEventID))
next
}
#don't need to clean up for plotting - will just plot the other line on the same
try(outputDF$lossRateSF6_clip[i] <- lineFit_clip$coefficients[[2]], silent = T)
#so clip is without station 1
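#Hedged sketch (assumption, not taken from this script): in the NEON reaRate workflow the SF6
#loss rate (per meter) is typically combined with a travel-time velocity (m/s) to get a
#first-order gas loss rate, which is then scaled to O2 using the Schmidt-number ratio
#(reaRateConv) computed above. The column names below exist in outputDF, but this exact
#calculation is illustrative only and is not executed here:
# kSF6perDay            <- -outputDF$lossRateSF6[i] * outputDF$peakMaxVelocity[i] * 86400  # d^-1
# outputDF$reaRateO2[i] <- kSF6perDay * reaRateConv                                        # d^-1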
if(plot == T & !all(is.na(x)) & !all(is.na(y))){
#Save out plot of loss rate to specified directory
if(!is.null(savePlotPath)){
png(paste0(savePlotPath,"/lossRate_",currEventID,".png"))
plot(x,y,main = currEventID, xlab = "meters downstream of injection", ylab = "LN(Tracer Gas/Background Corrected Tracer Salt)", col = "blue")
points(statDist,meanY, pch = 19)
abline(a = lineFit$coefficients[["Intercept"]], b = lineFit$coefficients[["X"]])
abline(a = lineFit_clip$coefficients[["Intercept"]], b = lineFit_clip$coefficients[["X"]], col='blue')
mtext(paste("y = ", lineFit$coefficients[[2]], "x +", lineFit$coefficients[[1]], "\n Click anywhere to close and continue"), cex = 0.8)
dev.off()
}
invisible(dev.new(noRStudioGD = TRUE, width=12, height=7))
plot(x,y,main = currEventID, xlab = "meters downstream of injection", ylab = "LN(Tracer Gas/Background Corrected Tracer Salt)", col = "blue")
points(statDist,meanY, pch=19)
abline(a = lineFit$coefficients[["Intercept"]], b = lineFit$coefficients[["X"]])
abline(a = lineFit_clip$coefficients[["Intercept"]], b = lineFit_clip$coefficients[["X"]], col='blue')
mtext(paste("y = ", lineFit$coefficients[[2]], "x +", lineFit$coefficients[[1]], "\n Click anywhere to close and continue"), cex = 0.8)
#print("Click anywhere on the plot to close and continue")
ans <- identify(x, y, n = 1, tolerance = 100, plot = F)
invisible(dev.off())
}
}
#Note: a loss rate is stored even when it is physically implausible (i.e., a positive slope)
#New section that requires the user to pick the range of data for the peak or plateau rising limb
#currEventID <- currEventID
s1LoggerData <- loggerData[loggerData$hoboSampleID == paste0(substr(currEventID, 1, 4), "_S1_", substr(currEventID, 6, 13)),]
s1LoggerData <- s1LoggerData[order(s1LoggerData$measurementNumber),]
s2LoggerDeployed <- FALSE
s4LoggerData <- loggerData[loggerData$hoboSampleID == paste0(substr(currEventID, 1, 4), "_S4_", substr(currEventID, 6, 13)),]
s4LoggerData <- s4LoggerData[order(s4LoggerData$measurementNumber),]
if(length(s1LoggerData[[1]]) <= 0 & length(s4LoggerData[[1]]) <= 0){
print(paste0("Conductivity logger data not available for ", currEventID, ", stations S1 & S4"))
next
}else if(length(s1LoggerData[[1]]) <= 0){
#This is added in for times when the first logger is at station 2 instead of station 1
print(paste0("Conductivity logger data not available for ", currEventID, ", station S1, looking for S2 logger data."))
s2LoggerData <- loggerData[loggerData$hoboSampleID == paste0(substr(currEventID, 1, 4), "_S2_", substr(currEventID, 6, 13)),]
s2LoggerData <- s2LoggerData[order(s2LoggerData$measurementNumber),]
if(length(s2LoggerData[[1]]) <= 0|length(s2LoggerData[[1]]) < 10){
print(paste0("Conductivity logger data not available/sufficient for ", currEventID, ", station S2"))
next
}else{
print(paste0("Conductivity logger data found available for ", currEventID, ", station S2"))
s1LoggerData <- s2LoggerData
s2LoggerDeployed <- TRUE
}
}else if(length(s4LoggerData[[1]]) <= 0){
print(paste0("Conductivity logger data not available for ", currEventID, ", station S4"))
next
}
if(length(s1LoggerData[[1]]) < 10){
print(paste0("Conductivity logger data has less than ten points for ", currEventID, ", station S1"))
next
}else if(length(s4LoggerData[[1]]) < 10){
print(paste0("Conductivity logger data has less than ten points for ", currEventID, ", station S4"))
next
}
#If low range isn't collected use the full range
if(!all(is.na(s1LoggerData$lowRangeSpCondNonlinear))){
condDataS1 <- s1LoggerData[,c("dateTimeLogger","lowRangeSpCondNonlinear")]
s1RangeFull <- FALSE
}else if(!all(is.na(s1LoggerData$fullRangeSpCondNonlinear))){
condDataS1 <- s1LoggerData[,c("dateTimeLogger","fullRangeSpCondNonlinear")]
s1RangeFull <- TRUE
}else{
print(paste0("Conductivity logger data not available for ", currEventID, ", station S1"))
next
}
if(!all(is.na(s4LoggerData$lowRangeSpCondNonlinear))){
condDataS4 <- s4LoggerData[,c("dateTimeLogger","lowRangeSpCondNonlinear")]
s4RangeFull <- FALSE
}else if(!all(is.na(s4LoggerData$fullRangeSpCondNonlinear))){
condDataS4 <- s4LoggerData[,c("dateTimeLogger","fullRangeSpCondNonlinear")]
s4RangeFull <- TRUE
}else{
print(paste0("Conductivity logger data not available for ", currEventID, ", station S4"))
next
}
names(condDataS1) <- c("dateTimeLogger","spCond")
names(condDataS4) <- c("dateTimeLogger","spCond")
if(is.na(currExpStartTime)){
currExpStartTime <- max(condDataS1$dateTimeLogger[1], condDataS4$dateTimeLogger[1])
}
#Find the peak locations - and need to change this to save the plots in the directory as well
station <- 'Station_1'
s1peakLoc <- manualpeakTime(loggerDataIn = condDataS1,
currEventID = currEventID,
injectionType = injectionType,
expStartTime = currExpStartTime,
station = station) # index to get date and time of peak/plateau half max
station <- 'Station_4'
# s4peakLoc <- reaRate::def.calc.peakTime(loggerDataIn = condDataS4,
s4peakLoc <- manualpeakTime(loggerDataIn = condDataS4,
currEventID = currEventID,
injectionType = injectionType,
expStartTime = currExpStartTime,
station=station) # index to get date and time of peak/plateau half max
#the peak locations get used for travel time later
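#Note (inferred from how the return value is used below and in the plotting section;
#manualpeakTime() itself is defined elsewhere and may return more than this): the function is
#expected to return either NULL, when no peak/plateau can be identified, or a list with at least
# peakTime     - POSIXct time of the peak / plateau half-max
# nominalTime  - POSIXct nominal arrival time
# centroidTime - POSIXct centroid of the tracer pulse (used in the travel-time plots)
# endPlotTime  - POSIXct right edge of the plotting window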
#If either of the peakTimes are NULL move on to the next eventID
if(is.null(s4peakLoc)){
print(paste0("Conductivity logger data peak/plateau cannot be identified for ", currEventID, ", station S4"))
next
}
#Get the dates from the indices and subtract to get travel time
if(typeof(s4peakLoc) == 'list' && !is.null(s4peakLoc$peakTime)){
outputDF$S4PeakTime[i] <- s4peakLoc$peakTime
}else{
next
}
# use the release point
if(!is.null(s4peakLoc$peakTime) && length(s4peakLoc$peakTime) > 0){
if(!is.na(currExpStartTime)){
outputDF$releasetoPeakTravelTime[i] <- difftime(s4peakLoc$peakTime,
currExpStartTime,
units = "secs")
}else{
next
}
}
if(!is.null(s4peakLoc$nominalTime) && length(s4peakLoc$nominalTime) > 0){
outputDF$S4NominalTime[i] <- s4peakLoc$nominalTime
}else{
next
}
if(!is.null(s4peakLoc$nominalTime) && length(s4peakLoc$nominalTime) > 0){
if(!is.na(currExpStartTime)){
outputDF$releasetoNominalTravelTime[i] <- difftime(s4peakLoc$nominalTime,
currExpStartTime,
units = "secs")
}
}else{
next
}
}
outputList <- list("outputDF"=outputDF,"inputFile"=inputFile)
return(outputList)
}
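#--- Hypothetical usage sketch (added for illustration; file names, paths, and index values are assumptions) ---
if(FALSE){
#Read the NEON reaeration field/lab table and the conductivity logger table from local CSVs
rea_fieldData  <- read.csv("rea_fieldAndLabData.csv", stringsAsFactors = FALSE)
rea_loggerData <- read.csv("rea_conductivityFieldData.csv", stringsAsFactors = FALSE)
#Process the first five eventIDs interactively, saving loss-rate and travel-time plots
rea_out <- manual.reaeration(inputFile = rea_fieldData,
loggerData = rea_loggerData,
plot = TRUE,
savePlotPath = "reaeration_plots", #directory must already exist
eventsindexstart = 1,
eventsindexend = 5)
head(rea_out$outputDF)
}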
#########The code below was removed from the loop above (it originally sat just before the closing bracket and the output list). It is kept as detached reference code; it references loop variables (i, condDataS1, s1peakLoc, etc.) and is not executed by manual.reaeration().
#Plot the travel times to check
if(plot==T){
s1YData <- condDataS1$spCond[condDataS1$dateTimeLogger > currExpStartTime & condDataS1$dateTimeLogger < s1peakLoc$endPlotTime]
s4YData <- condDataS4$spCond[condDataS4$dateTimeLogger > currExpStartTime & condDataS4$dateTimeLogger < s4peakLoc$endPlotTime]
#outputDF$peakMaxVelocity[i] <- outputDF$btwStaDist[i]/as.numeric(outputDF$peakMaxTravelTime[i]) # m/s
#outputDF$centroidVelocity[i] <- outputDF$btwStaDist[i]/as.numeric(outputDF$centroidTravelTime[i]) # m/s
#outputDF$harmonicMeanVelocity[i] <- outputDF$btwStaDist[i]/as.numeric(outputDF$harmonicMeanTravelTime[i]) # m/s
outputDF$releaseVelocity_peak[i] <- outputDF$releaseDist[i]/as.numeric(outputDF$releasetoPeakTravelTime[i])
outputDF$releaseVelocity_nominal[i] <- outputDF$releaseDist[i]/as.numeric(outputDF$releasetoNominalTravelTime[i])
if(length(s1YData)<1) next
if(length(s4YData)<1) next
# invisible(dev.new(noRStudioGD = TRUE, width=12, height=7))
x <- condDataS1$dateTimeLogger[condDataS1$dateTimeLogger > currExpStartTime & condDataS1$dateTimeLogger < s1peakLoc$endPlotTime]
#y <- s1LoggerData$fullRangeSpCondNonlinear[s1peakLoc$peakStart:s1peakLoc$peakEnd]
minTime <- currExpStartTime
maxTime <- max(s1peakLoc$endPlotTime,s4peakLoc$endPlotTime)
minY <- min(s1YData,s4YData,na.rm = TRUE)
maxY <- max(s1YData,s4YData,na.rm = TRUE)
#Save out plot of loss rate to specified directory
if(!is.null(savePlotPath)){
png(paste0(savePlotPath,"/travelTime_",currEventID,".png"))
plot(x,
s1YData,
xlim = c(minTime,maxTime),
ylim = c(minY,maxY),
ylab = "Conductivity, uS",
xlab = "Time (UTC)")
mtext(paste0("Peak Travel Time = ",round(as.numeric(outputDF$peakMaxTravelTime[i])/60,digits=1) ," min)\n Click anywhere to close and continue"), cex = 1.2)
points(condDataS4$dateTimeLogger[condDataS4$dateTimeLogger > currExpStartTime & condDataS4$dateTimeLogger < s4peakLoc$endPlotTime],
s4YData,
col = "blue")
try(abline(v = s1peakLoc$centroidTime))
try(abline(v = s4peakLoc$centroidTime, col = "blue"))
abline(v = s1peakLoc$peakTime, col='red')
abline(v = s4peakLoc$peakTime, col='red')
graphics::legend(x = "bottomright", legend = c("upstream","downstream"), lty = c(1,1), col = c("black","blue"))
dev.off()
}
invisible(dev.new(noRStudioGD = TRUE, width=12, height=7))
plot(x,
s1YData,
xlim = c(minTime,maxTime),
ylim = c(minY,maxY),
ylab = "Conductivity, uS",
xlab = "Time (UTC)")
mtext(paste0("Travel Time = ",outputDF$travelTime[i]," seconds, (",round(as.numeric(outputDF$centroidTravelTime[i])/60,digits=1) ," min)\n Click anywhere to close and continue"), cex = 1.2)
points(condDataS4$dateTimeLogger[condDataS4$dateTimeLogger > currExpStartTime & condDataS4$dateTimeLogger < s4peakLoc$endPlotTime],
s4YData,
col = "blue")
try(abline(v = s1peakLoc$centroidTime))
try(abline(v = s4peakLoc$centroidTime, col = "blue"))
abline(v = s1peakLoc$peakTime, col='red')
abline(v = s4peakLoc$peakTime, col='red')
graphics::legend(x = "bottomright", legend = c("upstream","downstream"), lty = c(1,1), col = c("black","blue"))
ans <- identify(x, s1YData, n = 1, tolerance = 100, plot = F)
invisible(dev.off())
}
#More calculations to get to the reaeration rate - TODO: confirm what the S2 logger is used for
if(s2LoggerDeployed){
outputDF$btwStaDist[i] <- inputFile[inputFile[[namLocIdx]] == S4 & inputFile[[eventIDIdx]] == currEventID, staDistIdx] -
inputFile[inputFile[[namLocIdx]] == S2 & inputFile[[eventIDIdx]] == currEventID, staDistIdx] # meters
outputDF$releaseDist[i] <- inputFile[inputFile[[namLocIdx]] == S4 & inputFile[[eventIDIdx]] == currEventID, staDistIdx]
}else{
outputDF$btwStaDist[i] <- inputFile[inputFile[[namLocIdx]] == S4 & inputFile[[eventIDIdx]] == currEventID, staDistIdx] -
inputFile[inputFile[[namLocIdx]] == S1 & inputFile[[eventIDIdx]] == currEventID, staDistIdx]
outputDF$releaseDist[i] <- inputFile[inputFile[[namLocIdx]] == S4 & inputFile[[eventIDIdx]] == currEventID, staDistIdx] # meters
}
outputDF$peakMaxVelocity[i] <- outputDF$btwStaDist[i]/as.numeric(outputDF$peakMaxTravelTime[i]) # m/s
#outputDF$centroidVelocity[i] <- outputDF$btwStaDist[i]/as.numeric(outputDF$centroidTravelTime[i]) # m/s
#outputDF$harmonicMeanVelocity[i] <- outputDF$btwStaDist[i]/as.numeric(outputDF$harmonicMeanTravelTime[i]) # m/s
outputDF$releaseVelocity_peak[i] <- outputDF$releaseDist[i]/as.numeric(outputDF$releasetoPeakTravelTime[i]) # m/s