Commit 14b63059 authored by tuckersiegel

mass update of min max to confidence interval

parent f53ba562
/data
/node_modules
/.vs
*.png
\ No newline at end of file
*.png
*.npy
\ No newline at end of file
File added
......@@ -35,6 +35,34 @@ function find(collection, query, projection, t_base, res, product) {
})
}
function find_normal(collection, query, projection, t_base, res, product) {
collection.findOne(query, projection).then(function(data) {
var gdd_base = [];
if (product == "corn") {
gdd_base = data["corn_normals_gdd_base"];
} else {
gdd_base = data["normals_gdd_base"];
}
var gdds = [];
var gdd_sum = 0;
for (var i = 0; i < gdd_base.length; i++) {
var gdd_value = gdd_base[i] - t_base; // utils.calculate_gdd(min_temps[i], max_temps[i], t_base, product)
gdd_value = gdd_value < 0 ? 0 : gdd_value;
gdd_sum += gdd_value
gdds.push(gdd_sum)
}
send_response("Accumulated GDDs", gdds, data, res);
}, function(err) {
res.status(500).send({"internal error": err})
})
}
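For reference, the accumulated series is just a running sum of the per-day base values, each clamped at zero before accumulation. A minimal numpy sketch of the same logic (array values hypothetical):

import numpy as np

# Hypothetical daily GDD base temperatures (deg F) and a 50 F base.
normals_gdd_base = np.array([48.0, 52.5, 55.0, 49.0, 60.0])
t_base = 50.0

daily = np.clip(normals_gdd_base - t_base, 0.0, None)  # negative daily GDDs floor at 0
accumulated = np.cumsum(daily)                         # one running total per day
print(accumulated.tolist())  # [0.0, 2.5, 7.5, 7.5, 17.5]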
exports.accumulated_gdd = function (req, res) {
......@@ -164,11 +192,11 @@ exports.accumulated_normal_gdd = function (req, res) {
}
var projection = {
min_temps: 1,
max_temps: 1,
normals_gdd_base: 1,
corn_normals_gdd_base: 1,
location: 1,
}
find(gdd_normal_collection, query, projection, t_base, res, product);
find_normal(gdd_normal_collection, query, projection, t_base, res, product);
};
\ No newline at end of file
normals_collection = require('../models/normals.js');
utils = require('../lib/utils');
var ci_convert = {
80: 1.282,
85: 1.440,
90: 1.645,
95: 1.960,
99: 2.576,
99.5: 2.807,
99.9: 3.291
}
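These z-scores are the standard two-sided normal quantiles for each confidence level; a quick sketch that reproduces the table (assuming scipy is available):

from scipy.stats import norm

# A two-sided interval at confidence c keeps the middle c% of the
# distribution, so the z-score is the (0.5 + c/200) quantile of N(0, 1).
for c in (80, 85, 90, 95, 99, 99.5, 99.9):
    print(c, round(float(norm.ppf(0.5 + c / 200)), 3))
# -> 1.282, 1.44, 1.645, 1.96, 2.576, 2.807, 3.291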
function send_response(message, gdds, data, res) {
res.json({
message: message,
......@@ -11,37 +22,47 @@ function send_response(message, gdds, data, res) {
})
}
function find(collection, query, projection, t_base, res, product) {
// var total_years = new Date().getFullYear() - new Date(1981, 0, 1, 0, 0, 0, 0).getFullYear() - 1;
// const arr_mean = arr => arr.reduce((a,b) => a + b, 0) / arr.length;
function find(collection, query, projection, t_base, res, product, confidence_interval) {
var total_years = new Date().getFullYear() - new Date(1981, 0, 1, 0, 0, 0, 0).getFullYear() - 1;
var z_score = ci_convert[confidence_interval];
collection.findOne(query, projection).then(function(data) {
var combined_min_means = data["combined_min_mean"];
var combined_max_means = data["combined_max_mean"];
// console.log(data);
var gdd_base = []
var gdd_std = []
if (product == "corn") {
gdd_base = data["corn_global_mean"];
gdd_std = data["corn_std"];
} else {
gdd_base = data["global_mean"];
gdd_std = data["main_std"];
}
var min_gdds = [];
var max_gdds = [];
var gdd_value = 0
var min_gdd_value = 0;
var max_gdd_value = 0;
var gdd_sum = 0;
var min_gdd_sum = 0;
var max_gdd_sum = 0;
for (var i = 0; i < combined_min_means.length; i++) {
if (product == "corn") {
max_gdd_value = combined_max_means[i] - t_base;
min_gdd_value = combined_min_means[i] - t_base;
} else {
max_gdd_value = utils.calculated_gdd_mean(combined_max_means[i], t_base);
min_gdd_value = utils.calculated_gdd_mean(combined_min_means[i], t_base);
}
min_gdd_sum += min_gdd_value;
max_gdd_sum += max_gdd_value;
for (var i = 0; i < gdd_base.length; i++) {
gdd_value = gdd_base[i];
max_gdd_value = (gdd_value + (gdd_std[i]/(total_years ** 0.5) * z_score)) - t_base
min_gdd_value = (gdd_value - (gdd_std[i]/(total_years ** 0.5) * z_score)) - t_base
min_gdd_value = min_gdd_value < 0 ? 0 : min_gdd_value;
max_gdd_value = max_gdd_value < 0 ? 0 : max_gdd_value;
min_gdd_sum += min_gdd_value
max_gdd_sum += max_gdd_value
min_gdds.push(min_gdd_sum);
max_gdds.push(max_gdd_sum);
}
res.json({
message: "min and max gdd",
message: "confidence interval bounds gdd",
minimum: min_gdds,
maximum: max_gdds,
closest_lon: data["location"]["coordinates"][0],
......@@ -53,11 +74,23 @@ function find(collection, query, projection, t_base, res, product) {
}
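The loop above is the textbook normal confidence interval for a mean, bound = mean ± z·σ/√n, applied per day and converted to GDDs by subtracting the base temperature and flooring at zero before accumulation. A numpy sketch of the same computation (all values hypothetical):

import numpy as np

total_years = 39          # mirrors find(): current year minus 1981 minus 1
z_score = 1.645           # 90% confidence
t_base = 50.0

gdd_mean = np.array([55.0, 58.0, 62.0])  # hypothetical daily mean base temps (deg F)
gdd_std = np.array([4.0, 5.0, 6.0])      # hypothetical daily standard deviations

half_width = z_score * gdd_std / np.sqrt(total_years)  # z times the standard error
upper = np.clip((gdd_mean + half_width) - t_base, 0.0, None)
lower = np.clip((gdd_mean - half_width) - t_base, 0.0, None)

max_gdds = np.cumsum(upper)  # accumulated upper bound
min_gdds = np.cumsum(lower)  # accumulated lower bound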
exports.min_max = function (req, res) {
exports.confidence_interval = function (req, res) {
var product = req.params.product;
var latitude = parseFloat(req.body.latitude)
var longitude = parseFloat(req.body.longitude)
var confidence_interval = 90
if (req.body.hasOwnProperty("confidence_interval")) {
confidence_interval = parseFloat(req.body.confidence_interval);
}
if (!ci_convert.hasOwnProperty(confidence_interval)) {
errors.push({
parameter_error: "confidence interval",
message: "confidence interval must be 80, 85, 90, 95, 99, 99.5 or 99.9"
});
}
var query = {
location: {
......@@ -100,14 +133,8 @@ exports.min_max = function (req, res) {
})
}
var projection = {
// combined_max_mean: 1,
// combined_min_mean: 1,
// location: 1,
}
find(normals_collection, query, projection, t_base, res, product);
find(normals_collection, query, {}, t_base, res, product, confidence_interval);
};
\ No newline at end of file
......@@ -35,6 +35,7 @@ function find(collection, query, projection, t_base, res, product) {
exports.year_gdd = function (req, res) {
var year = parseInt(req.params.year);
var product = req.params.product;
......
......@@ -2,17 +2,24 @@ normals_collection = require('../models/normals.js');
utils = require('../lib/utils');
function find(collection, query, projection, t_base, res, product) {
collection.findOne(query, projection).then(function(data) {
var min_temps = data["min_temps"]
var max_temps = data["max_temps"]
collection.findOne(query).then(function(data) {
var gdd_base = [];
if (product == "corn") {
gdd_base = data["corn_normals_gdd_base"];
} else {
gdd_base = data["normals_gdd_base"];
}
var gdds = [];
var min_temp = 0
var max_temp = 0
for (var i = 0; i < min_temps.length; i++) {
gdd_value = utils.calculate_gdd(min_temps[i], max_temps[i], t_base, product);
var gdd_sum = 0;
for (var i = 0; i < gdd_base.length; i++) {
var gdd_value = gdd_base[i] - t_base; // utils.calculate_gdd(min_temps[i], max_temps[i], t_base, product)
gdd_value = gdd_value < 0 ? 0 : gdd_value;
gdds.push(gdd_value)
}
......
......@@ -50,6 +50,7 @@ function calculate_gdd(min_temp, max_temp, t_base, product) {
var gdd = 0;
if (product == "corn") {
min_temp = min_temp >= 50 ? min_temp : 50;
min_temp = min_temp <= 86 ? min_temp : 86;
max_temp = max_temp >= 50 ? max_temp : 50;
max_temp = max_temp <= 86 ? max_temp : 86;
mean = ((max_temp + min_temp) / 2);
......
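The hunk above is the usual modified growing-degree-day rule for corn: clamp both temperatures to the 50–86 °F range, then take the daily mean minus the base. A small Python sketch of that calculation (callers floor negative values at zero):

def corn_gdd(min_temp, max_temp, t_base=50.0):
    """Modified GDD: clamp both temps to [50, 86] F, then mean minus base."""
    min_temp = min(max(min_temp, 50.0), 86.0)
    max_temp = min(max(max_temp, 50.0), 86.0)
    return (min_temp + max_temp) / 2.0 - t_base

print(corn_gdd(45.0, 90.0))  # (50 + 86) / 2 - 50 = 18.0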
......@@ -20,18 +20,24 @@ var normalsSchema = mongoose.Schema({
type: Number,
required: true
},
min_temps: {
normals_gdd_base: {
type: Array,
},
max_temps: {
corn_normals_gdd_base: {
type: Array,
},
combined_max_mean: {
corn_std: {
type: Array,
},
combined_min_mean: {
main_std: {
type: Array,
},
global_mean: {
type: Array,
},
corn_global_mean: {
type: Array,
}
});
var Model = module.exports = mongoose.model('normals', normalsSchema, "normals");
......@@ -3,12 +3,12 @@ const accController = require("./controllers/gddAccumulatedController")
const controller = require("./controllers/gddController")
const normController = require("./controllers/gddNormalController")
const freezingDatesController = require("./controllers/freezingDatesController")
const minMaxController = require("./controllers/minMaxController")
const confidenceIntervalController = require("./controllers/gddConfidenceInterval")
router.route("/:product/daily/:year/accumulated").post(accController.accumulated_gdd)
router.route("/:product/daily/:year").post(controller.year_gdd)
router.route("/:product/normal").post(normController.normal)
router.route("/:product/minmax").post(minMaxController.min_max)
router.route("/:product/confidence").post(confidenceIntervalController.confidence_interval)
router.route("/:product/normal/accumulated").post(accController.accumulated_normal_gdd)
router.route("/freezing/:temperature").post(freezingDatesController.freezing_dates)
......
......@@ -443,10 +443,9 @@ paths:
message:
type: string
example: 22.5 is out of bounds for freezing date calculations. Must be between 24.083334 - 49.916668
/api/{product}/minmax:
/api/{product}/confidence:
post:
summary: Returns maximum and minimum accumulated yearly gdd data
description: Returns the year with the highest and lowest accumulated gdds for a specific location and crop type. Includes both the year and the data for both max and min
summary: Returns confidence interval bounds for the specified product
produces:
- application/json
consumes:
......@@ -467,6 +466,7 @@ paths:
required:
- longitude
- latitude
properties:
latitude:
description: latitude to calculate gdd on
......@@ -480,6 +480,11 @@ paths:
minimum: -125.0
maximum: -66.5
example: -76.94
confidence_interval:
description: confidence interval to use
type: number
enum: [80, 85, 90, 95, 99, 99.5, 99.9]
example: 90
responses:
200:
......@@ -491,7 +496,7 @@ paths:
properties:
message:
type: string
example: min and max gdd
example: confidence interval bounds gdd
minimum:
type: array
items:
......@@ -500,12 +505,6 @@ paths:
type: array
items:
type: number
minimum_year:
type: number
minimum: 1981
maximum_year:
type: number
minimum: 1981
closest_lat:
type: number
minimum: 24.083334
......
......@@ -2,6 +2,7 @@ import requests
import os
import time
import matplotlib.pyplot as plt
import numpy as np
data = {
"latitude": 38.98567,
......@@ -31,19 +32,24 @@ print (time.time() - t)
r = requests.post("http://localhost:4000/api/freezing/28.2", data=data)
print (r.status_code)
print (r.json())
r = requests.post("http://localhost:4000/api/soybean/minmax", data=data)
r = requests.post("http://localhost:4000/api/corn/confidence", data=data)
print (r.status_code)
print (r.json())
min_gdds = r.json()["minimum"]
max_gdds = r.json()["maximum"]
r = requests.post("http://localhost:4000/api/soybean/normal/accumulated", data=data)
r = requests.post("http://localhost:4000/api/corn/normal/accumulated", data=data)
print (r.status_code)
print (r.json())
normals = r.json()["data"]
# max_gdds = np.array(max_gdds) + np.array(normals)
# max_gdds /= 2
# min_gdds = np.array(min_gdds) + np.array(normals)
# min_gdds /= 2
plt.plot(list(range(len(normals))), normals, color="black")
plt.plot(list(range(len(normals))), max_gdds, color="orange")
plt.plot(list(range(len(normals))), min_gdds, color="blue")
......
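Since the minimum and maximum series are now the two edges of a confidence band around the normals, shading the band (reusing the normals, min_gdds, and max_gdds arrays fetched by the requests above) can read better than three separate lines; a hypothetical variant:

import matplotlib.pyplot as plt

days = list(range(len(normals)))
plt.plot(days, normals, color="black", label="normal accumulated GDD")
plt.fill_between(days, min_gdds, max_gdds, color="orange", alpha=0.3,
                 label="90% confidence band")
plt.legend()
plt.show()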
import numpy as np
import xarray as xr
import tqdm
import datetime
import datetime, time, math
from pymongo import MongoClient
from numba import njit
def is_leap_year(year):
if (year % 4) == 0:
......@@ -34,6 +36,8 @@ lat = coords.latitude.data
lon = coords.longitude.data
lat = lat[::-1]
# years = list(range(1981, 2020 + 1))
# for year in years:
......@@ -116,6 +120,45 @@ lat = lat[::-1]
### 30 YEAR NORMALS ###
### Covers from 1981-2010 ###
@njit(fastmath=True, parallel=True)
def elementwise(tmins, tmaxs):
return np.divide(np.add(tmins, tmaxs), 2)
# @njit(fastmath=True, parallel=False)
def std(arr, fill_arr):
j_size = 69   # the grid is reduced in (j_size x k_size) spatial tiles to bound memory
k_size = 281
pbar = tqdm.tqdm(total=np.prod(arr.shape[1:]))
for j in range(0, fill_arr.shape[1], j_size):
for k in range(0, fill_arr.shape[2], k_size):
a = arr[:, :, j:j+j_size, k:k+k_size]
std_ = np.std(a, axis=0)
fill_arr[:, j:j+j_size, k:k+k_size] = std_
pbar.update(366 * j_size * k_size)
pbar.close()
return fill_arr
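std() tiles the two spatial axes so np.std never has to materialize temporaries for the full (years, days, lat, lon) array at once; the reduction over axis 0 is independent per grid cell, so the tiled result matches a direct np.std. A shrunken sanity check of that equivalence:

import numpy as np

arr = np.random.rand(40, 366, 8, 10).astype(np.float32)  # (years, days, lat, lon)
fill = np.empty(arr.shape[1:], dtype=np.float32)

j_size, k_size = 3, 4  # small spatial tiles for the demo
for j in range(0, arr.shape[2], j_size):
    for k in range(0, arr.shape[3], k_size):
        tile = arr[:, :, j:j + j_size, k:k + k_size]
        fill[:, j:j + j_size, k:k + k_size] = np.std(tile, axis=0)

assert np.allclose(fill, np.std(arr, axis=0))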
a1 = np.where(38.5 < lat)[0].tolist()
a2 = np.where(lat < 39.5)[0].tolist()
lat_a = np.array(list(set(a1) & set(a2)))
a1 = np.where(-77 < lon)[0].tolist()
a2 = np.where(lon < -76)[0].tolist()
lon_a = np.array(list(set(a1) & set(a2)))
# x1 = np.array(np.meshgrid(lat_a, lon_a)).T.reshape(len(lat_a) * len(lon_a), 2).tolist()
# x1 = [(z[0], z[1]) for z in x1]
# x2 = [(a, b) for a, b in zip(x[0], x[1])] # fix to x = [..... (x[0], x[1])] and all limiting stuff above and below when atlas limit removed
# x = list(set(x1) & set(x2))
# print (lat.shape)
# exit()
db = client["gdd_database"]
gdd = db.normals
......@@ -127,100 +170,132 @@ gdd = db["normals"]
resp = gdd.create_index([ ("location", "2dsphere") ])
resp = gdd.create_index([ ("year", 1) ])
single_year_min = np.zeros((366, 621, 1405))
single_year_max = np.zeros((366, 621, 1405))
year_gdd_base = np.memmap("year_gdd_base.npy", mode="w+", shape=(40, 366, len(lat_a), len(lon_a)), dtype=np.float32)
corn_year_gdd_base = np.memmap("corn_year_gdd_base.npy", mode="w+", shape=(40, 366, len(lat_a), len(lon_a)), dtype=np.float32)
ld_min = np.zeros((single_year_min.shape[-2], single_year_min.shape[-1]))
ld_max = np.zeros((single_year_min.shape[-2], single_year_min.shape[-1]) )
ld_min = np.zeros((year_gdd_base.shape[-2], year_gdd_base.shape[-1]), dtype=np.float16)  # accumulated below, so must start at zero
ld_max = np.zeros((year_gdd_base.shape[-2], year_gdd_base.shape[-1]), dtype=np.float16)
ld_count = 0
combined_min_mean = np.zeros((366, 621, 1405)) + 100000
combined_max_mean = np.zeros((366, 621, 1405)) - 100000
for year in range(1981, 2019+1):
i = 0
for year in range(1981, 2020+1):
print (year)
t = time.time()
data = xr.open_dataset("data/temps_%s.nc" % year)
tmins = data.tmin.data
tmaxs = data.tmax.data
tmaxs = tmaxs[np.ix_(list(range(len(tmaxs))), lat_a, lon_a)]
tmins = tmins[np.ix_(list(range(len(tmins))), lat_a, lon_a)]
tmins = tmins.astype(np.float32)
tmaxs = tmaxs.astype(np.float32)
del data
## idx 59 is leap day
if not is_leap_year(year): # extra day in leap year screws everything up
insert = np.zeros((1, tmins.shape[-2], tmins.shape[-1]))
insert[:] = np.nan
tmin_1 = tmins[:59]
tmin_2 = tmins[59:]
tmax_1 = tmaxs[:59]
tmin_2 = tmaxs[59:]
tmins = np.concatenate([tmin_1, insert, tmin_2], axis=0)
tmaxs = np.concatenate([tmax_1, insert, tmin_2], axis=0)
tmins = np.insert(tmins, 59, np.nan, axis=0)
tmaxs = np.insert(tmaxs, 59, np.nan, axis=0)
else:
# TODO One for normal one for reg
ld_min += tmins[59]
ld_max += tmaxs[59]
ld_count += 1
if year <= 2010:
test = elementwise(tmins, tmaxs)
year_gdd_base[i] = test  # (tmaxs + tmins) / 2
tmins = np.clip(tmins, 50, 86)  # corn GDDs clamp both temps to the 50-86 F range
tmaxs = np.clip(tmaxs, 50, 86)
single_year_max += tmaxs/30
single_year_min += tmins/30
test = elementwise(tmins, tmaxs)
corn_year_gdd_base[i] = test  # (tmaxs + tmins) / 2
i += 1
mean = (tmins + tmaxs) / 2
where_min = np.where(mean <= combined_min_mean)
where_max = np.where(mean >= combined_max_mean)
print ("generating means")
normals_gdd_base = year_gdd_base[:30].mean(axis=0)
corn_normals_gdd_base = corn_year_gdd_base[:30].mean(axis=0)
combined_max_mean[where_max] = mean[where_max]
combined_min_mean[where_min] = mean[where_min]
corn_global_mean = corn_year_gdd_base.mean(axis=0)
gdd_base_mean = year_gdd_base.mean(axis=0)
ld_min = ld_min/ld_count
ld_max = ld_max/ld_count
single_year_min[59] = ld_min
single_year_max[59] = ld_max
ld_gdd_base = (ld_min + ld_max) / 2
ld_min = np.clip(ld_min, 50, 86)
ld_max = np.clip(ld_max, 50, 86)
corn_ld_gdd_base = (ld_min + ld_max) / 2
corn_normals_gdd_base[59] = corn_ld_gdd_base
normals_gdd_base[59] = ld_gdd_base
corn_global_mean[59] = corn_ld_gdd_base
gdd_base_mean[59] = ld_gdd_base
corn_year_gdd_base[:, 59] = corn_ld_gdd_base
year_gdd_base[:, 59] = ld_gdd_base
print ("completed first bit")
x = np.where(~np.isnan(np.nanmean(single_year_max, axis=0)))
corn_year_gdd_std = np.empty((366, len(lat_a), len(lon_a)))
year_gdd_base_std = np.empty((366, len(lat_a), len(lon_a)))
print ("corn std")
corn_year_gdd_std = std(corn_year_gdd_base, corn_year_gdd_std)
print ("gdd std")
year_gdd_base_std = std(year_gdd_base, year_gdd_base_std)
print ("done")
x = np.where(~np.isnan(np.nanmean(corn_normals_gdd_base, axis=0)))
# x = [(a, b) for a, b in zip(x[0], x[1])]
# FORCE LOCATIONS TO COLLEGE PARK, LAT 38.99 LON -76.94 BECAUSE OF ATLAS LIMIT
a1 = np.where(38.5 < lat)[0].tolist()
a2 = np.where(lat < 39.5)[0].tolist()
lat_a = np.array(list(set(a1) & set(a2)))
# a1 = np.where(38.5 < lat)[0].tolist()
# a2 = np.where(lat < 39.5)[0].tolist()
# lat_a = np.array(list(set(a1) & set(a2)))
a1 = np.where(-77 < lon)[0].tolist()
a2 = np.where(lon < -76)[0].tolist()
lon_a = np.array(list(set(a1) & set(a2)))
# a1 = np.where(-77 < lon)[0].tolist()
# a2 = np.where(lon < -76)[0].tolist()
# lon_a = np.array(list(set(a1) & set(a2)))
x1 = np.array(np.meshgrid(lat_a, lon_a)).T.reshape(len(lat_a) * len(lon_a), 2).tolist()
x1 = [(z[0], z[1]) for z in x1]
x2 = [(a, b) for a, b in zip(x[0], x[1])] # fix to x = [..... (x[0], x[1])] and all limiting stuff above and below when atlas limit removed
x = list(set(x1) & set(x2))
# x = list(set(x1) & set(x2))
# print (x2)
tmins = single_year_min
tmaxs = single_year_max
# tmins = single_year_min
# tmaxs = single_year_max
locs = []
count = 0
for i in tqdm.tqdm(x):
for i in tqdm.tqdm(x2):
if len(locs) % 100 == 0 and len(locs) != 0:
new_result = gdd.insert_many(locs)
locs = []
tmin_ = tmins[:, i[0], i[1]]
tmax_ = tmaxs[:, i[0], i[1]]
normals_gdd_base_ = normals_gdd_base[:, i[0], i[1]]
corn_normals_gdd_base_ = corn_normals_gdd_base[:, i[0], i[1]]
# print(normals_gdd_base_)
combined_min_ = combined_min_mean[:, i[0], i[1]]
combined_max_ = combined_max_mean[:, i[0], i[1]]
corn_std_ = corn_year_gdd_std[:, i[0], i[1]]
main_std_ = year_gdd_base_std[:, i[0], i[1]]
corn_mean = corn_global_mean[:, i[0], i[1]]
main_mean = gdd_base_mean[:, i[0], i[1]]
# corn_std_ = corn_year_gdd_std[:, i[0], i[1]]
# main_std_ = year_gdd_base_std[:, i[0], i[1]]
lat_ = lat[i[0]]
lon_ = lon[i[1]]
......@@ -237,10 +312,16 @@ for i in tqdm.tqdm(x):
t["prism_lat"] = int(a[0])
t["prism_lon"] = int(a[1])
t["min_temps"] = list([float(a) for a in tmin_])
t["max_temps"] = list([float(a) for a in tmax_])
t["combined_min_mean"] = list([float(a) for a in combined_min_])
t["combined_max_mean"] = list([float(a) for a in combined_max_])
t["normals_gdd_base"] = list([float(a) for a in normals_gdd_base_])
t["corn_normals_gdd_base"] = list([float(a) for a in corn_normals_gdd_base_])
t["global_mean"] = list([float(a) for a in main_mean])
t["corn_global_mean"] = list([float(a) for a in corn_mean])
# t["combined_min_mean"] = list([float(a) for a in combined_min_])
# t["combined_max_mean"] = list([float(a) for a in combined_max_])
# t["corn_std"] = list([float(a) for a in corn_std_])
# t["main_std"] = list([float(a) for a in main_std_])
t["corn_std"] = list([float(a) for a in corn_std_])
t["main_std"] = list([float(a) for a in main_std_])
t["_id"] = _id
locs.append(t)
......
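The loop above flushes documents to Mongo in batches of 100 to keep memory use and round-trips bounded; a minimal standalone sketch of that pattern (the document generator is a hypothetical stand-in), including the final partial batch:

from pymongo import MongoClient

def generate_documents():
    # Hypothetical stand-in for the per-grid-cell document builder above.
    for i in range(250):
        yield {"_id": i, "normals_gdd_base": [0.0] * 366}

gdd = MongoClient()["gdd_database"]["normals"]
docs, batch_size = [], 100
for doc in generate_documents():
    docs.append(doc)
    if len(docs) == batch_size:
        gdd.insert_many(docs)  # flush a full batch
        docs = []
if docs:
    gdd.insert_many(docs)      # flush the final partial batch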