Skip to content

Commit edc464c

Browse files
committed
merge master into met
2 parents ee60955 + 33df46a commit edc464c

9 files changed

Lines changed: 511 additions & 24 deletions

README.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,12 +15,12 @@ Many of these tutorials have complex dependencies. In addition, many datasets ar
1515
The first research-grade version of TERRA REF data products will be released in November 2018.
1616
Before that, we will make evaluation releases available: the alpha version was released in November 2016 and the beta version will be released in 2017.
1717

18-
To access data please fill out a quick [Alpha User application](https://docs.google.com/forms/d/e/1FAIpQLScBsD042RrRok70BCGCRwARTcm9etvVHqvQaz1c5X7c5y0H3w/viewform?c=0&w=1).
18+
To access data please fill out a quick [Beta User application](http://terraref.org/beta).
1919

2020
### Links
2121

2222
TODO: add links to quick-start documentation, README's, code for learning and applied examples
2323

2424
* Data portal: terraref.org/data
2525
* Docker Images on Docker Hub: hub.docker.com/terraref
26-
* NDS Labs Workbench: terraref.ndslabs.org
26+
* NDS Labs Workbench: workbench.terraref.org

sensors/01-meteorological-data.Rmd

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -119,14 +119,15 @@ library(jsonlite)
119119
120120
url = ""
121121
mac_weather.list <- jsonlite::fromJSON('https://terraref.ncsa.illinois.edu/clowder/api/geostreams/datapoints?key=Pb3AUSqnUw&stream_id=4807&since=2017-01-02&until=2017-01-31')
122+
#mac_weather.list <- jsonlite::fromJSON('https://terraref.ncsa.illinois.edu/clowder/api/geostreams/datapoints?key=Pb3AUSqnUw&stream_id=3208&since=2017-01-02&until=2017-12-31', flatten = FALSE)
122123
123124
# change time to human-readable
124125
mac_weather <- mac_weather.list$properties %>%
125126
mutate(time = lubridate::ymd_hms(mac_weather.list$end_time))
126-
127-
128127
```
129128

129+
### Using
130+
130131
## Weather Summary
131132

132133

@@ -186,8 +187,9 @@ Did you see what we just did? We wrote some custom code to convert the units of
186187

187188
This higher resolution weather data can be used for VNIR calibration, for example. But at 1/s it is very large!
188189

190+
## Let's see how data are downloaded
189191

190-
### Lets see how data are downloaded
192+
Here we will download the files using the Clowder API, but note that if you have access to the filesystem (on www.workbench.terraref.org or Globus), you can directly access the data in the `sites/ua-mac/Level_1/EnvironmentLogger` folder.
191193

192194
```{r query-clowder}
193195
library(jsonlite)
@@ -215,7 +217,7 @@ ncfiles <- files[grepl('environmentlogger.nc', files$filename), ]
215217
print(ncfiles %>% select(id, filename))
216218
```
217219

218-
#### Download netCDF 1/s data from Clowder
220+
## Download netCDF 1/s data from Clowder
219221

220222

221223
```{r nc-download, echo=FALSE}
Lines changed: 76 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,76 @@
1+
# Using the PEcAn atmospheric data utilities
2+
3+
Explain what these are
4+
5+
github.com/pecanproject/pecan
6+
7+
insert slide from talks ...
8+
9+
## Dependencies
10+
11+
```{r install-pecan-dependencies, message=FALSE, eval = FALSE}
12+
13+
devtools::install_github("pecanproject/pecan",
14+
subdir = 'base/utils', ref = 'develop', dependencies = FALSE)
15+
devtools::install_github("pecanproject/pecan",
16+
subdir = 'base/db')
17+
devtools::install_github("rforge/reddyproc",
18+
subdir = "pkg/REddyProc")
19+
devtools::install_github("pecanproject/pecan",
20+
subdir = 'modules/data.atmosphere',
21+
ref = 'develop')
22+
23+
source("https://raw.githubusercontent.com/PecanProject/pecan/develop/models/biocro/R/met2model.BIOCRO.R")
24+
```
25+
26+
27+
## PEcAn Met Workflow
28+
29+
```{r write-clowder, eval = FALSE}
30+
writeLines("
31+
<pecan>
32+
<clowder>
33+
<hostname>terraref.ncsa.illinois.edu</hostname>
34+
<user>user@illinois.edu</user>
35+
<password>ask</password>
36+
</clowder>
37+
</pecan>",
38+
con = "~/.pecan.clowder.xml")
39+
```
40+
41+
![](pecan.clowder.xml.png)
42+
43+
```{r pecan-met-workflow, message=FALSE, warning=FALSE, eval = FALSE}
44+
library("PEcAn.data.atmosphere")
45+
library("dplyr")
46+
47+
## download raw data
48+
ne <- download.Geostreams(
49+
outfolder="data",
50+
sitename="EnvironmentLogger sensor_weather_station",
51+
start_date="2016-02-28",
52+
end_date="2016-04-01",
53+
overwrite = TRUE)
54+
55+
## convert to standard
56+
ne_cf <- met2CF.Geostreams(
57+
in.path = "data/",
58+
in.prefix = ne$dbfile.name,
59+
outfolder = "data/cf",
60+
start_date = "2016-03-01", # note date shift to avoid TZ issues
61+
end_date = "2016-04-01",
62+
overwrite = TRUE)
63+
64+
## convert to model specific input
65+
met2model.BIOCRO(
66+
overwrite = TRUE,
67+
in.path = "data/cf",
68+
in.prefix = ne_cf$dbfile.name,
69+
outfolder = "data/biocromet",
70+
lat = 40,
71+
lon = -88,
72+
start_date = "2016-03-01",
73+
end_date = "2016-03-30")
74+
75+
met <- readr::read_csv('data/biocromet/Clowder.UIUC Energy Farm - NE.2016-02-28.2016-04-01.2016.csv')
76+
```
Lines changed: 24 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -7,25 +7,26 @@ from the workbench or Globus.
77
## Getting started
88

99
After installing terrautils, you should be able to import the *product* module.
10-
```
10+
```{python}
1111
from terrautils.products import get_sensor_list, unique_sensor_names
1212
from terrautils.products import get_file_listing, extract_file_paths
1313
```
1414

15-
The get\_sensor\_list and get\_file\_listing both require connection, url,
15+
The `get_sensor_list` and `get_file_listing` functions both require connection, url,
1616
and key parameters. *Connection* can be 'None', the *url* (called host in the
1717
code) should be something like https://terraref.ncsa.illinois.edu/clowder/.
1818
The *key* is a unique access key for the Clowder api.
1919

2020
## Getting the sensor list
21+
2122
The first thing to get is the sensor name. This can be retrieved using the
22-
get\_sensor\_list function. This function returns the full record which may
23+
`get_sensor_list` function. This function returns the full record which may
2324
be useful in some cases but primarily includes sensor names that include
24-
a plot id number. The utility function unique_sensor_names accpets the
25+
a plot id number. The utility function `unique_sensor_names` accepts the
2526
sensor list and provides a list of names suitable for use in the
26-
get_file_listing function.
27+
`get_file_listing` function.
2728

28-
```
29+
```{python}
2930
sensors = get_sensor_list(None, url, key)
3031
names = unique_sensor_names(sensors)
3132
```
@@ -36,16 +37,16 @@ geostreams API. The currently available sensors are:
3637
* IR Surface Temperature
3738
* Thermal IR GeoTIFFs Datasets
3839
* flirIrCamera Datasets
39-
* (EL) sensor\_weather\_station
40+
* (EL) sensor_weather_station
4041
* Irrigation Observations
4142
* Canopy Cover
4243
* Energy Farm Observations SE
43-
* (EL) sensor\_par
44+
* (EL) sensor_par
4445
* scanner3DTop Datasets
4546
* Weather Observations
4647
* Energy Farm Observations NE
4748
* RGB GeoTIFFs Datasets
48-
* (EL) sensor\_co2
49+
* (EL) sensor_co2
4950
* stereoTop Datasets
5051
* Energy Farm Observations CEN
5152

@@ -55,28 +56,30 @@ The geostreams API can be used to get a list of datasets that overlap a
5556
specific plot boundary and, optionally, limited by a time range. Iterating
5657
over the datasets allows the paths to all the files to be extracted.
5758

58-
```
59+
```{python}
5960
sensor = 'Thermal IR GeoTIFFs Datasets'
6061
sitename = 'MAC Field Scanner Season 1 Field Plot 101 W'
6162
datasets = get_file_listing(None, url, key, sensor, sitename)
6263
files = extract_file_paths(datasets)
6364
```
6465

6566
Datasets can be further filtered using the *since* and *until* parameters
66-
of get\_file\_listing with a date string.
67+
of `get_file_listing` with a date string.
6768

68-
```
69+
```{python}
6970
dataset = get_file_listing(None, url, key, sensor, sitename,
7071
since='2016-06-01', until='2016-06-10')
7172
```
7273

7374

7475
# Alternative method
76+
7577
The following method demonstrates the same approach using the Clowder API. This
7678
approach is useful for understanding the data layout and when the Python
7779
terrautils package is not available.
7880

7981
## Finding plot ID
82+
8083
```
8184
SENSOR_NAME = "MAC Field Scanner Season 1 Field Plot 101 W"
8285
GET https://terraref.ncsa.illinois.edu/clowder/api/geostreams/sensors?sensor_name={SENSOR_NAME}
@@ -85,7 +88,9 @@ GET https://terraref.ncsa.illinois.edu/clowder/api/geostreams/sensors?sensor_nam
8588
This returns a JSON object with an 'id' parameter. You can use this ID parameter to specify the right data stream.
8689

8790
## Finding stream ID within a plot
91+
8892
The names are formatted as "<Sensor Group> Datasets (<Sensor ID>)".
93+
8994
```
9095
SENSOR_ID = 3355
9196
STREAM_NAME = "Thermal IR GeoTIFFs Datasets ({SENSOR_ID})"
@@ -95,13 +100,15 @@ GET https://terraref.ncsa.illinois.edu/clowder/api/geostreams/streams?stream_nam
95100
This returns a JSON object with an 'id' parameter. You can use this ID parameter to get the right datapoints.
96101

97102
## Listing Clowder file IDs for that plot & sensor stream
103+
98104
```
99105
STREAM_ID = "11586"
100106
GET https://terraref.ncsa.illinois.edu/clowder/api/geostreams/datapoints?stream_id={STREAM_ID}
101107
```
102108

103109
This returns a list of datapoint JSON objects, each with a 'properties' parameter that looks like:
104-
```
110+
111+
```{python}
105112
properties: {
106113
dataset_name: "Thermal IR GeoTIFFs - 2016-05-09__12-07-57-990",
107114
source_dataset: "https://terraref.ncsa.illinois.edu/clowder/datasets/59fc9e7d4f0c3383c73d2905"
@@ -111,19 +118,23 @@ properties: {
111118
The source_dataset URL can be used to view the dataset in Clowder.
112119

113120
You can also filter the datapoints by date:
121+
114122
```
115123
GET https://terraref.ncsa.illinois.edu/clowder/api/geostreams/datapoints?stream_id={STREAM_ID}&since=2017-01-02&until=2017-06-10
116124
```
117125

118126
## Getting ROGER file path from dataset
127+
119128
Given a source dataset URL, we can call the API to get the files and their paths.
129+
120130
```
121131
SOURCE_DATASET = "https://terraref.ncsa.illinois.edu/clowder/datasets/59fc9e7d4f0c3383c73d2905"
122132
# Add /api after /clowder, and add /files at the end of the URL
123133
GET "https://terraref.ncsa.illinois.edu/clowder/api/datasets/59fc9e7d4f0c3383c73d2905/files"
124134
```
125135

126136
This returns a list of files in the dataset and their paths if available:
137+
127138
```
128139
[
129140
{
Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,51 @@
1+
2+
## Hyperspectral Data
3+
4+
### Calibration Targets
5+
6+
These were collected on April 15 2017 every ~15 minutes
7+
8+
9+
```{r get-vnir-calibration, eval=FALSE}
10+
library(ncdf4)
11+
library(dplyr)
12+
13+
hsi_calibration_dir <- '/data/terraref/sites/ua-mac/Level_1/hyperspectral/2017-04-15'
14+
hsi_calibration_files <- dir(hsi_calibration_dir,
15+
recursive = TRUE,
16+
full.names = TRUE)
17+
18+
fileinfo <- bind_rows(lapply(hsi_calibration_files, file.info)) %>%
19+
mutate(size_gb = size/1073741824)
20+
21+
calibration_nc <- nc_open(hsi_calibration_files[200])
22+
a <- calibration_nc$var$rfl_img
23+
24+
25+
#calibration_nc$dim$x$len 1600
26+
#calibration_nc$dim$y$len
27+
x_length <- round(calibration_nc$dim$x$len / 10)
28+
y_length <- round(calibration_nc$dim$y$len * 3/4)
29+
30+
xstart <- ceiling(calibration_nc$dim$x$len / 2) - floor(x_length / 2) + 1
31+
32+
ystart <- ceiling(calibration_nc$dim$y$len / 2) - floor(y_length / 2) + 1
33+
34+
rfl <- ncvar_get(calibration_nc, 'rfl_img',
35+
#start = c(1, xstart, ystart),
36+
#count = c(955, x_length, y_length)
37+
start = c(2, 2, 2),
38+
count = c(1320, 10, 954)
39+
)
40+
x <- ncvar_get(calibration_nc, 'x', start = 100, count = 160)
41+
y <- ncvar_get(calibration_nc, 'y', start = 100, count = 1324)
42+
lambda <- calibration_nc$dim$wavelength$vals
43+
for(i in 1 + 0:10*95){
44+
image(x = x, y = y, z = rfl[i,,],
45+
xlab = 'x (m)', ylab = 'y (m)',
46+
col = rainbow(n=100),
47+
main = paste('wavelength',
48+
udunits2::ud.convert(lambda[i],'m','nm')))
49+
}
50+
51+
```

0 commit comments

Comments
 (0)