Commit c03a72d0 authored by abuddenberg

Cleaned up erroneous commit of conflict artifacts. Moved old data files into their own directory

parent fc4ffa4b
@@ -4,7 +4,7 @@
<output url="file://$MODULE_DIR$/bin" />
<exclude-output />
<content url="file://$MODULE_DIR$" />
<orderEntry type="jdk" jdkName="Python 2.7.3 (/Library/Frameworks/Python.framework/Versions/2.7/bin/python)" jdkType="Python SDK" />
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>
@@ -6,7 +6,6 @@ Created on Nov 20, 2012
DATA_DIR = '/Users/abuddenberg/workspace/plot_precip_maps/data/'
NA_PRECIP_FILES = [
<<<<<<< HEAD
(DATA_DIR + 'pr_rcp85_1970-1999_2071-2099_percent_change.nc_hatched_North_America_AR5_white.nc', 'pr_rcp85_2071-2099_north_american_{}.eps'),
(DATA_DIR + 'pr_rcp26_1970-1999_2071-2099_percent_change.nc_hatched_North_America_AR5_white.nc', 'pr_rcp26_2071-2099_north_american_{}.eps'),
(DATA_DIR + 'pr_sresa2_1970-1999_2071-2099_percent_change.nc_hatched_North_America_AR5_white.nc', 'pr_sresa2_2071-2099_north_american_{}.eps'),
@@ -30,17 +29,6 @@ NA_SCENARIOS_FILES = {
GLOBAL_PRECIP_FILES = [
(DATA_DIR + 'pr_rcp26_1970-1999_2071-2099_percent_change.nc_hatched_Global_AR5_white.nc', 'pr_rcp26_2071-2099_global_{}.eps'),
(DATA_DIR + 'pr_rcp85_1970-1999_2071-2099_percent_change.nc_hatched_Global_AR5_white.nc', 'pr_rcp85_2071-2099_global_{}.eps')
=======
(DATA_DIR + 'pr_rcp85_1970-1999_2071-2099_percent_change.nc_hatched_North_America_AR5_white.nc', 'pr_rcp85_2071-2099_NA_{}.eps'),
(DATA_DIR + 'pr_rcp26_1970-1999_2071-2099_percent_change.nc_hatched_North_America_AR5_white.nc', 'pr_rcp26_2071-2099_NA_{}.eps'),
(DATA_DIR + 'pr_sresa2_1970-1999_2071-2099_percent_change.nc_hatched_North_America_AR5_white.nc', 'pr_sresa2_2071-2099_NA_{}.eps')
]
GLOBAL_PRECIP_FILES = [
(DATA_DIR + 'pr_rcp26_2071-2099_percent_change-1.nc_hatched_global_ar5_white.nc', 'pr_rcp26_2071-2099_global_{}.eps'),
(DATA_DIR + 'pr_rcp85_2071-2099_percent_change-1.nc_hatched_global_ar5_white.nc', 'pr_rcp85_2071-2099_global_{}.eps')
>>>>>>> dbd7e5f7b2a0c4d96dda5d2c140efbb6a052e6ad
]
@@ -36,7 +36,6 @@ for infilename, outfilename in GLOBAL_PRECIP_FILES:
m.drawcountries()
lons, lats = meshgrid(lon_data, lat_data)
<<<<<<< HEAD
#pcolor coordinates should refer to the lower left corner of the grid box;
# the data refers to the center of the grid box. Shift the lon and lat south and west by half the height and
@@ -49,9 +48,6 @@ for infilename, outfilename in GLOBAL_PRECIP_FILES:
x, y = m(lons, lats)
x_shifted, y_shifted = m(lons_shifted, lats_shifted)
=======
x,y = m(lons, lats)
>>>>>>> dbd7e5f7b2a0c4d96dda5d2c140efbb6a052e6ad
#Build boolean masks of the gridpoint for each category
stipples_mask = np.ma.getmask(np.ma.masked_equal(signif, 1.))
@@ -72,26 +68,16 @@ for infilename, outfilename in GLOBAL_PRECIP_FILES:
data = np.ma.masked_array(data.filled(2.0)) #2.0 denotes areas of statistical uncertainty
<<<<<<< HEAD
weird = m.pcolor(x_shifted, y_shifted, data)
m.colorbar(weird,location='right',pad="5%")
m.scatter(x, y, 3, marker='o')
=======
weird = m.pcolor(x,y, data)
m.colorbar(weird,location='right',pad="5%")
>>>>>>> dbd7e5f7b2a0c4d96dda5d2c140efbb6a052e6ad
#Tests for overlap (There shouldn't be any)
# print np.any(np.logical_and(stipples_mask, zeros_mask))
# print np.any(np.logical_and(third_cat_mask, zeros_mask))
# print np.any(np.logical_and(third_cat_mask, stipples_mask))
<<<<<<< HEAD
plt.savefig('../dist/' + outfilename.format('categories'), format='eps', dpi=200)
# plt.show()
=======
# plt.savefig('../dist/' + outfilename.format('categories'), format='eps', dpi=200)
plt.show()
>>>>>>> dbd7e5f7b2a0c4d96dda5d2c140efbb6a052e6ad
@@ -44,28 +44,14 @@ for infilename, outfilename in GLOBAL_PRECIP_FILES:
lons, lats = meshgrid(lon_data, lat_data)
x,y = m(lons, lats)
<<<<<<< HEAD
levels_15 = [-45, -30, -15, 0, 15, 30, 45]
levels_10 = [-30, -20, -10, 0, 10, 20, 30]
prcp = m.contourf(x, y, data, cmap=plt.get_cmap('BrBG'), levels=levels_10, extend='both')
=======
levels_15 = [-45, -30, -15, 0, 15, 30, 45]
levels_10 = [-30, -20, -10, 0, 10, 20, 30]
prcp = m.contourf(x, y, data, cmap=plt.get_cmap('BrBG'), levels=levels_15, extend='both')
>>>>>>> dbd7e5f7b2a0c4d96dda5d2c140efbb6a052e6ad
m.colorbar(prcp, location='bottom',pad="5%")
hatching = m.contourf(x,y, signif, 1, colors='none',hatches=[None, '//'])
# m.colorbar(hatching,location='right',pad="5%")
<<<<<<< HEAD
plt.savefig('../dist/' + outfilename.format('percent_change'), format='eps', dpi=200)
# plt.show()
=======
# plt.savefig('../dist/' + outfilename.format('percent_change'), format='eps', dpi=200)
plt.show()
>>>>>>> dbd7e5f7b2a0c4d96dda5d2c140efbb6a052e6ad
@@ -5,16 +5,11 @@ Created on Nov 27, 2012
"""
from scipy.io.netcdf import netcdf_file
from mpl_toolkits.basemap import Basemap
<<<<<<< HEAD
import numpy as np
=======
from numpy import meshgrid
>>>>>>> dbd7e5f7b2a0c4d96dda5d2c140efbb6a052e6ad
import matplotlib.pyplot as plt
from config import NA_PRECIP_FILES, SEASONS
<<<<<<< HEAD
map_proj = {
'projection': 'aea',
'lon_0': -96,
@@ -144,53 +139,3 @@ def plot_category_map():
# plt.show()
main()
=======
for infilename, outfilename in NA_PRECIP_FILES:
nc = netcdf_file(infilename)
lat_data = nc.variables['lat'].data
lon_data = nc.variables['lon'].data
fig = plt.figure(figsize=(25,16), dpi=100, tight_layout=True)
for i, season in enumerate(['Winter', 'Spring', 'Summer', 'Fall']):
data_var, signif_var = SEASONS[season]
data = nc.variables[data_var].data
signif = nc.variables[signif_var].data
ax = fig.add_subplot(221 + i)
plt.title(season)
m = Basemap(
projection='aea',
lon_0=-96,
lat_0=37.5,
lat_1=29.5,
lat_2=45.5,
# lat_ts=median(lats),
llcrnrlat=10,
urcrnrlat=40,
llcrnrlon=-160,
urcrnrlon=-158,
resolution='l',area_thresh=1
)
m.drawcoastlines()
m.drawstates()
m.drawcountries()
lons, lats = meshgrid(lon_data, lat_data)
x,y = m(lons, lats)
levels_10 = [-30, -20, -10, 0, 10, 20, 30]
prcp = m.contourf(x, y, data, cmap=plt.get_cmap('BrBG'), levels=levels_10, extend='both')
m.colorbar(prcp, location='bottom',pad="5%")
hatching = m.contourf(x,y, signif, 1, colors='none',hatches=[None, '//'])
# m.colorbar(hatching,location='right',pad="5%")
plt.savefig('../dist/' + outfilename.format('HI_percent_change'), format='eps', dpi=200)
# plt.show()
>>>>>>> dbd7e5f7b2a0c4d96dda5d2c140efbb6a052e6ad
@@ -31,26 +31,15 @@ for infilename, outfilename in NA_PRECIP_FILES:
lat_data = nc.variables['lat'].data
lon_data = nc.variables['lon'].data
<<<<<<< HEAD
fig = plt.figure(figsize=(25,16), dpi=100, tight_layout=True)
for i, season in enumerate(['Winter', 'Spring', 'Summer', 'Fall']): #['Winter', 'Spring', 'Summer', 'Fall']
=======
fig = plt.figure(figsize=(25,16), dpi=100, tight_layout=True)
for i, season in enumerate(['Winter', 'Spring', 'Summer', 'Fall']):
>>>>>>> dbd7e5f7b2a0c4d96dda5d2c140efbb6a052e6ad
data_var, signif_var = SEASONS[season]
data = nc.variables[data_var].data
signif = nc.variables[signif_var].data
<<<<<<< HEAD
=======
>>>>>>> dbd7e5f7b2a0c4d96dda5d2c140efbb6a052e6ad
ax = fig.add_subplot(231 + i)
plt.title(season)
m = Basemap(
@@ -70,7 +59,6 @@ for infilename, outfilename in NA_PRECIP_FILES:
m.drawcoastlines()
m.drawstates()
m.drawcountries()
<<<<<<< HEAD
lons, lats = meshgrid(lon_data, lat_data)
@@ -85,11 +73,6 @@ for infilename, outfilename in NA_PRECIP_FILES:
x, y = m(lons, lats)
x_shifted, y_shifted = m(lons_shifted, lats_shifted)
=======
lons, lats = meshgrid(lon_data, lat_data)
x,y = m(lons, lats)
>>>>>>> dbd7e5f7b2a0c4d96dda5d2c140efbb6a052e6ad
#Build boolean masks of the gridpoint for each category
stipples_mask = np.ma.getmask(np.ma.masked_equal(signif, 1.))
@@ -105,7 +88,6 @@ for infilename, outfilename in NA_PRECIP_FILES:
data.mask = zeros_mask
data = np.ma.masked_array(data.filled(1.0)) #1.0 denotes areas little change; blue
<<<<<<< HEAD
data.mask = third_cat_mask
data = np.ma.masked_array(data.filled(2.0)) #2.0 denotes areas of statistical uncertainty; green
@@ -115,26 +97,12 @@ for infilename, outfilename in NA_PRECIP_FILES:
m.scatter(x, y, 3, marker='o')
=======
data.mask = third_cat_mask
data = np.ma.masked_array(data.filled(2.0)) #2.0 denotes areas of statistical uncertainty; green
weird = m.pcolor(x,y, data)
m.colorbar(weird,location='right',pad="5%")
>>>>>>> dbd7e5f7b2a0c4d96dda5d2c140efbb6a052e6ad
#Tests for overlap (There shouldn't be any)
# print np.any(np.logical_and(stipples_mask, zeros_mask))
# print np.any(np.logical_and(third_cat_mask, zeros_mask))
# print np.any(np.logical_and(third_cat_mask, stipples_mask))
<<<<<<< HEAD
# plt.savefig('../dist/' + outfilename.format('categories'), format='eps', dpi=200)
=======
# plt.savefig('../dist/' + outfilename.format('north_american_categories'), format='eps', dpi=200)
>>>>>>> dbd7e5f7b2a0c4d96dda5d2c140efbb6a052e6ad
plt.show()
@@ -24,12 +24,8 @@ for infilename, outfilename in NA_PRECIP_FILES:
lon_data = nc.variables['lon'].data
fig = plt.figure(figsize=(25,16), dpi=100, tight_layout=True)
<<<<<<< HEAD
for i, season in enumerate(['Winter', 'Spring', 'Summer', 'Fall']): #['Winter', 'Spring', 'Summer', 'Fall']
=======
for i, season in enumerate(['Winter', 'Spring', 'Summer', 'Fall']):
>>>>>>> dbd7e5f7b2a0c4d96dda5d2c140efbb6a052e6ad
data_var, signif_var = SEASONS[season]
data = nc.variables[data_var].data
@@ -57,7 +53,6 @@ for infilename, outfilename in NA_PRECIP_FILES:
lons, lats = meshgrid(lon_data, lat_data)
x,y = m(lons, lats)
<<<<<<< HEAD
levels_10 = [-30, -20, -10, 0, 10, 20, 30]
@@ -65,15 +60,7 @@ for infilename, outfilename in NA_PRECIP_FILES:
m.colorbar(prcp, location='bottom',pad="5%")
hatching = m.contourf(x, y, signif, 1, colors='none', hatches=[None, '//'])
=======
levels_10 = [-30, -20, -10, 0, 10, 20, 30]
prcp = m.contourf(x, y, data, cmap=plt.get_cmap('BrBG'), levels=levels_10, extend='both')
m.colorbar(prcp, location='bottom',pad="5%")
hatching = m.contourf(x,y, signif, 1, colors='none',hatches=[None, '//'])
>>>>>>> dbd7e5f7b2a0c4d96dda5d2c140efbb6a052e6ad
# m.colorbar(hatching,location='right',pad="5%")
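Note: the x_shifted/y_shifted arrays kept from HEAD in the hunks above come from a half-grid-cell shift whose actual computation sits in the unexpanded context; a minimal sketch of that technique is shown below. The grid values, the 'cyl' projection, the placeholder data field, and the dlon/dlat names are illustrative assumptions, not taken from this repository.

from mpl_toolkits.basemap import Basemap
from numpy import meshgrid
import numpy as np

# Illustrative 2.5-degree grid; in the scripts above these come from the
# netCDF 'lon'/'lat' variables and the seasonal data variables.
lon_data = np.arange(0.0, 360.0, 2.5)
lat_data = np.arange(-88.75, 90.0, 2.5)
data = np.random.rand(lat_data.size, lon_data.size)  # placeholder field

m = Basemap(projection='cyl', llcrnrlat=-90, urcrnrlat=90,
            llcrnrlon=0, urcrnrlon=360, resolution='c')

# Grid spacing, assuming evenly spaced coordinate vectors.
dlon = lon_data[1] - lon_data[0]
dlat = lat_data[1] - lat_data[0]

# pcolor treats its x/y as cell corners while the data values refer to cell
# centers, so shift the coordinates south and west by half a grid cell.
lons, lats = meshgrid(lon_data, lat_data)
lons_shifted, lats_shifted = meshgrid(lon_data - dlon / 2.0,
                                      lat_data - dlat / 2.0)

x, y = m(lons, lats)                                  # centers (scatter overlay)
x_shifted, y_shifted = m(lons_shifted, lats_shifted)  # corners (pcolor)
weird = m.pcolor(x_shifted, y_shifted, data)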