diff --git a/.gitignore b/.gitignore
index 2e504efd7120f00948edadd6d16a8189f0332582..8ab2149e584fa00e61611fc7e781d576cc7e048c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
 /data
 /node_modules
 /.vs
-*.png
\ No newline at end of file
+*.png
+*.npy
\ No newline at end of file
diff --git a/__pycache__/numba_cuda_fcns.cpython-37.pyc b/__pycache__/numba_cuda_fcns.cpython-37.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cfd9e5df5fb171c52bab7a798cfdba4691048750
Binary files /dev/null and b/__pycache__/numba_cuda_fcns.cpython-37.pyc differ
diff --git a/controllers/gddAccumulatedController.js b/controllers/gddAccumulatedController.js
index 824ee076d4b155dcca3513b526d2772dc786789d..1769239479ba4d3fcfcc30db373f11ba0339c627 100644
--- a/controllers/gddAccumulatedController.js
+++ b/controllers/gddAccumulatedController.js
@@ -35,6 +35,34 @@ function find(collection, query, projection, t_base, res, product) {
     })
 }
 
+function find_normal(collection, query, projection, t_base, res, product) {
+    collection.findOne(query, projection).then(function(data) {
+        // Pick the pre-computed GDD base series for the requested product.
+        var gdd_base = [];
+        if (product == "corn") {
+            gdd_base = data["corn_normals_gdd_base"];
+        } else {
+            gdd_base = data["normals_gdd_base"];
+        }
+
+        var gdds = [];
+        var gdd_value = 0;
+        var gdd_sum = 0;
+
+        // Daily GDD relative to the base temperature, clamped at zero, then accumulated.
+        for (var i = 0; i < gdd_base.length; i++) {
+            gdd_value = gdd_base[i] - t_base;
+            gdd_value = gdd_value < 0 ? 0 : gdd_value;
+            gdd_sum += gdd_value;
+            gdds.push(gdd_sum);
+        }
+        send_response("Accumulated GDDs", gdds, data, res);
+    }, function(err) {
+        res.status(500).send({"internal error": err});
+    })
+}
+
 
 exports.accumulated_gdd = function (req, res) {
 
@@ -164,11 +192,11 @@ exports.accumulated_normal_gdd = function (req, res) {
     }
 
     var projection = {
-        min_temps: 1,
-        max_temps: 1,
+        normals_gdd_base: 1,
+        corn_normals_gdd_base: 1,
         location: 1,
     }
 
 
-    find(gdd_normal_collection, query, projection, t_base, res, product);   
+    find_normal(gdd_normal_collection, query, projection, t_base, res, product);   
 };
\ No newline at end of file
diff --git a/controllers/minMaxController.js b/controllers/gddConfidenceInterval.js
similarity index 55%
rename from controllers/minMaxController.js
rename to controllers/gddConfidenceInterval.js
index 188a865219c4398fb233df084a867d14fb959d67..4af9ff0d1e8465fdb39a2f033da1121e6220f103 100644
--- a/controllers/minMaxController.js
+++ b/controllers/gddConfidenceInterval.js
@@ -1,6 +1,17 @@
 normals_collection = require('../models/normals.js');
 utils = require('../lib/utils');
 
+
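+// Two-sided z-scores for the supported confidence levels (in percent).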
+var ci_convert = {
+    80: 1.282,
+    85: 1.440,
+    90: 1.645,
+    95: 1.960,
+    99: 2.576,
+    99.5: 2.807,
+    99.9: 3.291
+}
+
 function send_response(message, gdds, data, res) {
     res.json({
         message: message,
@@ -11,37 +22,47 @@ function send_response(message, gdds, data, res) {
     })
 }
 
-function find(collection, query, projection, t_base, res, product) {
-    // var total_years = new Date().getFullYear() -  new Date(1981, 0, 1, 0, 0, 0, 0).getFullYear() - 1;
-    // const arr_mean = arr => arr.reduce((a,b) => a + b, 0) / arr.length;
+function find(collection, query, projection, t_base, res, product, confidence_interval) {
+    // Span of years since the 1981 start of the record; used as the sample size in the standard-error term.
+    var total_years = new Date().getFullYear() - new Date(1981, 0, 1, 0, 0, 0, 0).getFullYear() - 1;
+    var z_score = ci_convert[confidence_interval];
     collection.findOne(query, projection).then(function(data) {
-
-        var combined_min_means = data["combined_min_mean"];
-        var combined_max_means = data["combined_max_mean"];
+        // Select the product-specific long-term mean and standard deviation series.
+        var gdd_base = [];
+        var gdd_std = [];
+        if (product == "corn") {
+            gdd_base = data["corn_global_mean"];
+            gdd_std = data["corn_std"];
+        } else {
+            gdd_base = data["global_mean"];
+            gdd_std = data["main_std"];
+        }
         
         var min_gdds = [];
         var max_gdds = [];
+        var gdd_value = 0;
         var min_gdd_value = 0;
         var max_gdd_value = 0;
         var min_gdd_sum = 0;
         var max_gdd_sum = 0;
 
-        for (var i = 0; i < combined_min_means.length; i++) {
-            if (product == "corn") {
-                max_gdd_value = combined_max_means[i] - t_base;
-                min_gdd_value = combined_min_means[i] - t_base;
-            } else {
-                max_gdd_value = utils.calculated_gdd_mean(combined_max_means[i], t_base);
-                min_gdd_value = utils.calculated_gdd_mean(combined_min_means[i], t_base);
-            }
-            min_gdd_sum += min_gdd_value;
-            max_gdd_sum += max_gdd_value;
+        for (var i = 0; i < gdd_base.length; i++) {
+            gdd_value = gdd_base[i];
+
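+            // Bounds: daily mean +/- z_score * (std / sqrt(total_years)), shifted by the base temperature and clamped at zero below.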
+            max_gdd_value = (gdd_value + (gdd_std[i] / (total_years ** 0.5) * z_score)) - t_base;
+            min_gdd_value = (gdd_value - (gdd_std[i] / (total_years ** 0.5) * z_score)) - t_base;
+
+            min_gdd_value = min_gdd_value < 0 ? 0 : min_gdd_value;
+            max_gdd_value = max_gdd_value < 0 ? 0 : max_gdd_value;
+
+            min_gdd_sum += min_gdd_value;
+            max_gdd_sum += max_gdd_value;
 
             min_gdds.push(min_gdd_sum);
             max_gdds.push(max_gdd_sum);
         }
         res.json({
-            message: "min and max gdd",
+            message: "confidence interval bounds gdd",
             minimum: min_gdds,
             maximum: max_gdds,
             closest_lon: data["location"]["coordinates"][0],
@@ -53,11 +74,23 @@ function find(collection, query, projection, t_base, res, product) {
 }
 
 
-exports.min_max = function (req, res) {
+exports.confidence_interval = function (req, res) {
     var product = req.params.product;
 
     var latitude = parseFloat(req.body.latitude)
     var longitude = parseFloat(req.body.longitude)
+    // Default to a 90% confidence interval when none is supplied.
+    var confidence_interval = 90;
+
+    if (req.body.hasOwnProperty("confidence_interval")) {
+        confidence_interval = parseFloat(req.body.confidence_interval);
+    }
+
+    if (!ci_convert.hasOwnProperty(confidence_interval)) {
+        errors.push({
+            parameter_error: "confidence interval",
+            message: "confidence interval must be 80, 85, 90, 95, 99, 99.5 or 99.9"
+        });
+    }
 
     var query = {
         location: { 
@@ -100,14 +133,8 @@ exports.min_max = function (req, res) {
         })
     }
 
-    var projection = {
-        // combined_max_mean: 1,
-        // combined_min_mean: 1,
-        // location: 1,
-    }
-
 
-    find(normals_collection, query, projection, t_base, res, product);
+    find(normals_collection, query, {}, t_base, res, product, confidence_interval);
     
    
 };
\ No newline at end of file
diff --git a/controllers/gddController.js b/controllers/gddController.js
index 33c48c9d161b575d52468e1bdd182c58f19a5340..6e6e51a4f1518057933cdc4153a56cf3d544d9f7 100644
--- a/controllers/gddController.js
+++ b/controllers/gddController.js
@@ -35,6 +35,7 @@ function find(collection, query, projection, t_base, res, product) {
 
 
 exports.year_gdd = function (req, res) {
+
     var year = parseInt(req.params.year);
     var product = req.params.product;
 
diff --git a/controllers/gddNormalController.js b/controllers/gddNormalController.js
index 6ea874dcd456fc4bcf25d2e57c75c461587b0134..6adf582c271abae5ef7048a413334d4af27f04d2 100644
--- a/controllers/gddNormalController.js
+++ b/controllers/gddNormalController.js
@@ -2,17 +2,24 @@ normals_collection = require('../models/normals.js');
 utils = require('../lib/utils');
 
 function find(collection, query, projection, t_base, res, product) {
-    collection.findOne(query, projection).then(function(data) {
-        var min_temps = data["min_temps"]
-        var max_temps = data["max_temps"]
+    collection.findOne(query).then(function(data) {
+        // Pick the pre-computed GDD base series for the requested product.
+        var gdd_base = [];
+        if (product == "corn") {
+            gdd_base = data["corn_normals_gdd_base"];
+        } else {
+            gdd_base = data["normals_gdd_base"];
+        }
+
         var gdds = [];
         
         var min_temp = 0
         var max_temp = 0
 
-        for (var i = 0; i < min_temps.length; i++) {
-            gdd_value = utils.calculate_gdd(min_temps[i], max_temps[i], t_base, product);
-
+        var gdd_value = 0;
+
+        // Daily GDD relative to the base temperature, clamped at zero.
+        for (var i = 0; i < gdd_base.length; i++) {
+            gdd_value = gdd_base[i] - t_base;
+            gdd_value = gdd_value < 0 ? 0 : gdd_value;
             gdds.push(gdd_value)
         }
 
diff --git a/lib/utils.js b/lib/utils.js
index de2ef459a5ba458fa37d459a53df7b7bbeaacb62..135e6c49d037a4650c4b5d91fe193219dc830aec 100644
--- a/lib/utils.js
+++ b/lib/utils.js
@@ -50,6 +50,7 @@ function calculate_gdd(min_temp, max_temp, t_base, product) {
     var gdd = 0;
     if (product == "corn") {
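+        // Corn GDD clamps both min and max temperatures to the 50-86 F range.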
         min_temp = min_temp >= 50 ? min_temp : 50;
+        min_temp = min_temp <= 86 ? min_temp : 86;
         max_temp = max_temp >= 50 ? max_temp : 50;
         max_temp = max_temp <= 86 ? max_temp : 86;
         mean = ((max_temp + min_temp) / 2);
diff --git a/models/normals.js b/models/normals.js
index b462fae5702b3c9b5dce558e94ee055943fb78f3..4ac5065b2b1da977ce7574e38baa84aad1e47bb6 100644
--- a/models/normals.js
+++ b/models/normals.js
@@ -20,18 +20,24 @@ var normalsSchema = mongoose.Schema({
         type: Number,
         required: true
     },
-    min_temps: {
+    normals_gdd_base: {
         type: Array,
     },
-    max_temps: {
+    corn_normals_gdd_base: {
         type: Array,
     },
-    combined_max_mean: {
+    corn_std: {
         type: Array,
     },
-    combined_min_mean: {
+    main_std: {
         type: Array,
     },
+    global_mean: {
+        type: Array,
+    },
+    corn_global_mean: {
+        type: Array,
+    }
 });
 
 var Model = module.exports = mongoose.model('normals', normalsSchema, "normals");
diff --git a/routes.js b/routes.js
index 8c3fa3880efb89fea7e3efa683b8b1acb0d1e307..892577d37b0ce350cf8345272127be0afe1f7c40 100644
--- a/routes.js
+++ b/routes.js
@@ -3,12 +3,12 @@ const accController = require("./controllers/gddAccumulatedController")
 const controller = require("./controllers/gddController")
 const normController = require("./controllers/gddNormalController")
 const freezingDatesController = require("./controllers/freezingDatesController")
-const minMaxController = require("./controllers/minMaxController")
+const confidenceIntervalController = require("./controllers/gddConfidenceInterval")
 
 router.route("/:product/daily/:year/accumulated").post(accController.accumulated_gdd)
 router.route("/:product/daily/:year").post(controller.year_gdd)
 router.route("/:product/normal").post(normController.normal)
-router.route("/:product/minmax").post(minMaxController.min_max)
+router.route("/:product/confidence").post(confidenceIntervalController.confidence_interval)
 router.route("/:product/normal/accumulated").post(accController.accumulated_normal_gdd)
 router.route("/freezing/:temperature").post(freezingDatesController.freezing_dates)
 
diff --git a/swagger_definition.yaml b/swagger_definition.yaml
index a8ac06980f0e8dc88db406742d3fdb8cd35bcc2d..3b28fc2d3b4889dc02c2752270857c016ee7f6b3 100644
--- a/swagger_definition.yaml
+++ b/swagger_definition.yaml
@@ -443,10 +443,9 @@ paths:
                                               message:
                                                   type: string
                                                   example: 22.5 is out of bounds for freezing date calculations. Must be between 24.083334 - 49.916668     
-    /api/{product}/minmax:
+    /api/{product}/confidence:
       post:
-          summary: Returns maximum and minimum accumulated yearly gdd data
-          description: Returns the year with the highest and lowest accumulated gdds for a specific location and crop type. Includes both the year and the data for both max and min
+          summary: Returns confidence interval bounds for accumulated GDDs
+          description: Returns the lower and upper accumulated GDD series for the requested confidence interval at a specific location and crop type
           produces:
               - application/json
           consumes:
@@ -467,6 +466,7 @@ paths:
                           required:
                               - longitude
                               - latitude
                           properties:
                               latitude:
                                   description: latitude to calculate gdd on
@@ -480,6 +480,11 @@ paths:
                                   minimum: -125.0
                                   maximum: -66.5
                                   example: -76.94
+                              confidence_interval:
+                                  description: confidence interval to use (defaults to 90 when omitted)
+                                  type: number
+                                  enum: [80, 85, 90, 95, 99, 99.5, 99.9]
+                                  example: 90
                   
           responses:
               200:
@@ -491,7 +496,7 @@ paths:
                               properties:
                                   message:
                                       type: string
-                                      example: min and max gdd
+                                      example: confidence interval bounds gdd
                                   minimum:
                                       type: array
                                       items:
@@ -500,12 +505,6 @@ paths:
                                       type: array
                                       items:
                                           type: number
-                                  minimum_year:
-                                      type: number
-                                      minimum: 1981
-                                  maximum_year:
-                                      type: number
-                                      minimum: 1981
                                   closest_lat:
                                       type: number
                                       minimum: 24.083334
diff --git a/test_server.py b/test_server.py
index a78a556add0c1afc105b7a79c17f90cfc52a30bf..9283d12b11151a2a4db83b906da8e2cc415fc6a6 100644
--- a/test_server.py
+++ b/test_server.py
@@ -2,6 +2,7 @@ import requests
 import os
 import time 
 import matplotlib.pyplot as plt
+import numpy as np
 
 data = {
     "latitude": 38.98567,
@@ -31,19 +32,24 @@ print (time.time() - t)
 r = requests.post("http://localhost:4000/api/freezing/28.2", data=data)
 print (r.status_code)
 print (r.json())
-r = requests.post("http://localhost:4000/api/soybean/minmax", data=data)
+r = requests.post("http://localhost:4000/api/corn/confidence", data=data)
 print (r.status_code)
 print (r.json())
 
 min_gdds = r.json()["minimum"]
 max_gdds = r.json()["maximum"]
 
-r = requests.post("http://localhost:4000/api/soybean/normal/accumulated", data=data)
+r = requests.post("http://localhost:4000/api/corn/normal/accumulated", data=data)
 print (r.status_code)
 print (r.json())
 
 normals = r.json()["data"]
 
+# max_gdds = np.array(max_gdds) + np.array(normals)
+# max_gdds /= 2
+# min_gdds = np.array(min_gdds) + np.array(normals)
+# min_gdds /= 2
+
 plt.plot(list(range(len(normals))), normals, color="black")
 plt.plot(list(range(len(normals))), max_gdds, color="orange")
 plt.plot(list(range(len(normals))), min_gdds, color="blue")
diff --git a/to_mongo.py b/to_mongo.py
index 02ab4567529cd74a2f361098fd9f7d4eeeb5d9f3..d629b9422d1280595cea7c405ec3200070fb078c 100644
--- a/to_mongo.py
+++ b/to_mongo.py
@@ -1,8 +1,10 @@
 import numpy as np
 import xarray as xr
 import tqdm 
-import datetime
+import datetime, time, math
 from pymongo import MongoClient
+from numba import njit
+
 
 def is_leap_year(year):
     if (year % 4) == 0:
@@ -34,6 +36,8 @@ lat = coords.latitude.data
 lon = coords.longitude.data
 lat = lat[::-1]
 
+
+
 # years = list(range(1981, 2020 + 1))
 
 # for year in years:
@@ -116,6 +120,45 @@ lat = lat[::-1]
 ### 30 YEAR NORMALS ###
 ### Covers from 1981-2010 ###
 
+
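+# JIT-compiled elementwise mean of the tmin/tmax grids (the daily GDD base temperature).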
+@njit(fastmath=True, parallel=True)
+def elemetwise(tmins, tmaxs):
+    return np.divide(np.add(tmins, tmaxs), 2)
+def std(arr, fill_arr):
+    # Standard deviation across the year axis, computed in spatial tiles to limit memory use.
+    j_size = 69
+    k_size = 281
+    pbar = tqdm.tqdm(total=np.prod(arr.shape[1:]))
+
+    for j in range(0, fill_arr.shape[1], j_size):
+        for k in range(0, fill_arr.shape[2], k_size):
+            a = arr[:, :, j:j+j_size, k:k+k_size]
+            std_ = np.std(a, axis=0)
+
+            fill_arr[:, j:j+j_size, k:k+k_size] = std_
+            pbar.update(366 * j_size * k_size)
+    pbar.close()
+    return fill_arr
+
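+# Restrict processing to a ~1 degree box around College Park, MD (lat ~38.99, lon ~-76.94) because of the Atlas limit noted below.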
+a1 = np.where(38.5 < lat)[0].tolist()
+a2 = np.where(lat < 39.5)[0].tolist()
+lat_a = np.array(list(set(a1) & set(a2)))
+
+a1 = np.where(-77 < lon)[0].tolist()
+a2 = np.where(lon < -76)[0].tolist()
+lon_a = np.array(list(set(a1) & set(a2)))
+
+
 db = client["gdd_database"]
 
 gdd = db.normals
@@ -127,100 +170,132 @@ gdd = db["normals"]
 resp = gdd.create_index([ ("location", "2dsphere") ])
 resp = gdd.create_index([ ("year", 1) ])
 
-single_year_min = np.zeros((366, 621, 1405))
-single_year_max = np.zeros((366, 621, 1405))
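+# Memory-map the per-year daily GDD base grids so all 40 years never have to sit in RAM at once.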
+year_gdd_base = np.memmap("year_gdd_base.npy", mode="w+", shape=(40, 366, len(lat_a), len(lon_a)), dtype=np.float32)
+corn_year_gdd_base = np.memmap("corn_year_gdd_base.npy", mode="w+", shape=(40, 366, len(lat_a), len(lon_a)), dtype=np.float32)
 
-ld_min = np.zeros((single_year_min.shape[-2], single_year_min.shape[-1]))
-ld_max = np.zeros((single_year_min.shape[-2], single_year_min.shape[-1]) )
+# Leap-day accumulators; must start at zero because leap-day temps are summed into them in place.
+ld_min = np.zeros((year_gdd_base.shape[-2], year_gdd_base.shape[-1]), dtype=np.float32)
+ld_max = np.zeros((year_gdd_base.shape[-2], year_gdd_base.shape[-1]), dtype=np.float32)
 ld_count = 0 
 
-combined_min_mean = np.zeros((366, 621, 1405)) + 100000
-combined_max_mean = np.zeros((366, 621, 1405)) - 100000
-
-for year in range(1981, 2019+1):
+i = 0
+for year in range(1981, 2020+1):
     print (year)
+    t = time.time()
     data = xr.open_dataset("data/temps_%s.nc" % year)
 
     tmins = data.tmin.data
     tmaxs = data.tmax.data 
 
+    # Subset the full CONUS grids to the selected lat/lon window.
+    tmaxs = tmaxs[np.ix_(list(range(len(tmaxs))), lat_a, lon_a)]
+    tmins = tmins[np.ix_(list(range(len(tmins))), lat_a, lon_a)]
+
+    tmins = tmins.astype(np.float32)
+    tmaxs = tmaxs.astype(np.float32)
+    del data
+
     ## idx 59 is leap day
 
     if not is_leap_year(year): # extra day in leap year screws everything up
-
-        insert = np.zeros((1, tmins.shape[-2], tmins.shape[-1])) 
-        insert[:] = np.nan
-
-        tmin_1 = tmins[:59]
-        tmin_2 = tmins[59:]
-
-        tmax_1 = tmaxs[:59]
-        tmin_2 = tmaxs[59:]
-
-        tmins = np.concatenate([tmin_1, insert, tmin_2], axis=0)
-        tmaxs = np.concatenate([tmax_1, insert, tmin_2], axis=0)
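+        # Insert a NaN placeholder at index 59 (Feb 29) so non-leap years also span 366 days.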
+        tmins = np.insert(tmins, 59, np.nan, axis=0)
+        tmaxs = np.insert(tmaxs, 59, np.nan, axis=0)
     
     else:
-
+        # TODO One for normal one for reg
         ld_min += tmins[59]
         ld_max += tmaxs[59]
         ld_count += 1
 
-    if year <= 2010:
+    # Uncapped daily mean temperature feeds the generic GDD base.
+    test = elemetwise(tmins, tmaxs)
+    year_gdd_base[i] = test
+
+    # Corn GDD clamps both tmin and tmax to the 50-86 F range before averaging,
+    # matching utils.calculate_gdd and the leap-day handling below.
+    tmins = np.clip(tmins, 50, 86)
+    tmaxs = np.clip(tmaxs, 50, 86)
 
-        single_year_max += tmaxs/30
-        single_year_min += tmins/30
+    test = elemetwise(tmins, tmaxs)
 
+    corn_year_gdd_base[i] = test
+    i += 1
 
-    mean = (tmins + tmaxs) / 2
 
-    where_min = np.where(mean <= combined_min_mean)
-    where_max = np.where(mean >= combined_max_mean)
+print ("generating means")
+normals_gdd_base = year_gdd_base[:30].mean(axis=0)
+corn_normals_gdd_base = corn_year_gdd_base[:30].mean(axis=0)
 
-    combined_max_mean[where_max] = mean[where_max]
-    combined_min_mean[where_min] = mean[where_min]
+corn_global_mean = corn_year_gdd_base.mean(axis=0)
+gdd_base_mean = year_gdd_base.mean(axis=0)
 
 ld_min = ld_min/ld_count
 ld_max = ld_max/ld_count
 
-single_year_min[59] = ld_min 
-single_year_max[59] = ld_max
+ld_gdd_base = (ld_min + ld_max) / 2
+ld_min = np.clip(ld_min, 50, 86)
+ld_max = np.clip(ld_max, 50, 86)
+corn_ld_gdd_base = (ld_min + ld_max) / 2
+
+corn_normals_gdd_base[59] = corn_ld_gdd_base
+normals_gdd_base[59] = ld_gdd_base
+
+corn_global_mean[59] = corn_ld_gdd_base
+gdd_base_mean[59] = ld_gdd_base
+
+corn_year_gdd_base[:, 59] = corn_ld_gdd_base
+year_gdd_base[:, 59] = ld_gdd_base
+print ("completed first bit")
 
-x = np.where(~np.isnan(np.nanmean(single_year_max, axis=0)))
+corn_year_gdd_std = np.empty((366, len(lat_a), len(lon_a)))
+year_gdd_base_std = np.empty((366, len(lat_a), len(lon_a)))
+
+print ("corn std")
+corn_year_gdd_std = std(corn_year_gdd_base, corn_year_gdd_std)
+print ("gdd std")
+year_gdd_base_std = std(year_gdd_base, year_gdd_base_std)
+print ("done")
+
+x = np.where(~np.isnan(np.nanmean(corn_normals_gdd_base, axis=0)))
 # x = [(a, b) for a, b in zip(x[0], x[1])]
 
 # FORCE LOCATIONS TO COLLEGE PARK, LAT 38.99 LON -76.94 BECAUSE OF ATLAS LIMIT
 
-a1 = np.where(38.5 < lat)[0].tolist()
-a2 = np.where(lat < 39.5)[0].tolist()
-lat_a = np.array(list(set(a1) & set(a2)))
+# a1 = np.where(38.5 < lat)[0].tolist()
+# a2 = np.where(lat < 39.5)[0].tolist()
+# lat_a = np.array(list(set(a1) & set(a2)))
 
-a1 = np.where(-77 < lon)[0].tolist()
-a2 = np.where(lon < -76)[0].tolist()
-lon_a = np.array(list(set(a1) & set(a2)))
+# a1 = np.where(-77 < lon)[0].tolist()
+# a2 = np.where(lon < -76)[0].tolist()
+# lon_a = np.array(list(set(a1) & set(a2)))
 
 x1 = np.array(np.meshgrid(lat_a, lon_a)).T.reshape(len(lat_a) * len(lon_a), 2).tolist()
 x1 = [(z[0], z[1]) for z in x1]
 x2 = [(a, b) for a, b in zip(x[0], x[1])] # fix to x = [..... (x[0], x[1])] and all limiting stuff above and below when atlas limit removed
 
-x = list(set(x1) & set(x2))
+# x = list(set(x1) & set(x2))
 
-tmins = single_year_min
-tmaxs = single_year_max
+# tmins = single_year_min
+# tmaxs = single_year_max
 
 locs = []
 
 count = 0
-for i in tqdm.tqdm(x):
+for i in tqdm.tqdm(x2):
     if len(locs) % 100 == 0 and len(locs) != 0:
         new_result = gdd.insert_many(locs)
         locs = []
     
-    tmin_ = tmins[:, i[0], i[1]]
-    tmax_ = tmaxs[:, i[0], i[1]]
+    normals_gdd_base_ = normals_gdd_base[:, i[0], i[1]]
+    corn_normals_gdd_base_ = corn_normals_gdd_base[:, i[0], i[1]]
+
 
-    combined_min_ = combined_min_mean[:, i[0], i[1]]
-    combined_max_ = combined_max_mean[:, i[0], i[1]]
+    corn_std_ = corn_year_gdd_std[:, i[0], i[1]]
+    main_std_ = year_gdd_base_std[:, i[0], i[1]]
+    corn_mean = corn_global_mean[:, i[0], i[1]]
+    main_mean = gdd_base_mean[:, i[0], i[1]]
     
     lat_ = lat[i[0]]
     lon_ = lon[i[1]]
@@ -237,10 +312,16 @@ for i in tqdm.tqdm(x):
     t["prism_lat"] = int(a[0])
     t["prism_lon"] = int(a[1])
     
-    t["min_temps"] = list([float(a) for a in tmin_])
-    t["max_temps"] = list([float(a) for a in tmax_])
-    t["combined_min_mean"] = list([float(a) for a in combined_min_])
-    t["combined_max_mean"] = list([float(a) for a in combined_max_])
+    t["normals_gdd_base"] = list([float(a) for a in normals_gdd_base_])
+    t["corn_normals_gdd_base"] = list([float(a) for a in corn_normals_gdd_base_])
+    t["global_mean"] = list([float(a) for a in main_mean])
+    t["corn_global_mean"] = list([float(a) for a in corn_mean])
+    # t["combined_min_mean"] = list([float(a) for a in combined_min_])
+    # t["combined_max_mean"] = list([float(a) for a in combined_max_])
+    # t["corn_std"] = list([float(a) for a in corn_std_])
+    # t["main_std"] = list([float(a) for a in main_std_])
+    t["corn_std"] = list([float(a) for a in corn_std_])
+    t["main_std"] = list([float(a) for a in main_std_])
     t["_id"] = _id
     
     locs.append(t)