Python 3 builtins via six #1699

Merged: 5 commits, Jun 22, 2015
2 changes: 1 addition & 1 deletion docs/iris/example_code/General/custom_file_loading.py
@@ -65,7 +65,7 @@ def load_NAME_III(filename):

# loading a file gives a generator of lines which can be progressed using the next() method.
# This will come in handy as we wish to progress through the file line by line.
-file_handle = file(filename)
+file_handle = open(filename)

# define a dictionary which can hold the header metadata about this file
headers = {}
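A quick note on why this single line changes: the `file` built-in was removed in Python 3, while `open()` exists and behaves the same on both 2 and 3, so it is a drop-in replacement. A minimal sketch (the file name is made up for illustration):

```python
# Python 3 removed the file() built-in entirely.
try:
    file
except NameError:
    print("file() does not exist on Python 3")

# open() is the portable spelling; NAME_output.txt is a hypothetical file.
with open("NAME_output.txt", "w") as fh:
    fh.write("header line\n")

with open("NAME_output.txt") as fh:
    # Lines can still be pulled one at a time with next(), as the example relies on.
    print(next(fh).rstrip())
```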
6 changes: 4 additions & 2 deletions docs/iris/example_code/Meteorology/COP_maps.py
@@ -18,8 +18,9 @@


"""
+from six.moves import zip

import os.path
-import itertools
import matplotlib.pyplot as plt
import numpy as np

@@ -65,7 +66,8 @@ def main():


# Iterate over each latitude longitude slice for both e1 and a1b scenarios simultaneously
-for e1_slice, a1b_slice in itertools.izip(e1.slices(['latitude', 'longitude']), a1b.slices(['latitude', 'longitude'])):
+for e1_slice, a1b_slice in zip(e1.slices(['latitude', 'longitude']),
+                               a1b.slices(['latitude', 'longitude'])):

time_coord = a1b_slice.coord('time')

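`itertools.izip` exists only on Python 2; on Python 3 the built-in `zip` is already an iterator. `from six.moves import zip` resolves to `itertools.izip` on Python 2 and to the built-in on Python 3, so the slice loop stays lazy on both. A small sketch (assumes six is installed; the toy iterables stand in for the cube slices):

```python
from six.moves import zip  # itertools.izip on Python 2, the built-in zip on Python 3

e1_slices = iter(["e1 slice 0", "e1 slice 1"])
a1b_slices = iter(["a1b slice 0", "a1b slice 1"])

# Lazily paired iteration, identical behaviour on both Python lines.
for e1_slice, a1b_slice in zip(e1_slices, a1b_slices):
    print(e1_slice, "<->", a1b_slice)
```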
3 changes: 2 additions & 1 deletion docs/iris/src/sphinxext/generate_package_rst.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2010 - 2014, Met Office
+# (C) British Crown Copyright 2010 - 2015, Met Office
#
# This file is part of Iris.
#
@@ -16,6 +16,7 @@
# along with Iris. If not, see <http://www.gnu.org/licenses/>.

from __future__ import (absolute_import, division, print_function)
+from six.moves import filter

import os
import sys
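For context, `six.moves` is simply a namespace of aliases for objects that were renamed or relocated between Python 2 and 3, so `filter`, `map`, `range` and `zip` imported from it are always the lazy, iterator-returning versions. A quick check (assumes six is installed):

```python
from six.moves import filter, map, range, zip

# On Python 2 these names resolve to itertools.ifilter, itertools.imap,
# xrange and itertools.izip; on Python 3 they are just the built-ins.
print(filter, map, range, zip)

evens = filter(lambda n: n % 2 == 0, range(10))
print(list(evens))  # [0, 2, 4, 6, 8] on both Python lines
```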
@@ -1,5 +1,6 @@

from __future__ import (absolute_import, division, print_function)
+from six.moves import zip

import iris
import iris.analysis
2 changes: 1 addition & 1 deletion lib/iris/__init__.py
@@ -369,7 +369,7 @@ def load_cubes(uris, constraints=None, callback=None):
collection = _load_collection(uris, constraints, callback).merged()

# Make sure we have exactly one merged cube per constraint
-bad_pairs = filter(lambda pair: len(pair) != 1, collection.pairs)
+bad_pairs = [pair for pair in collection.pairs if len(pair) != 1]
if bad_pairs:
fmt = ' {} -> {} cubes'
bits = [fmt.format(pair.constraint, len(pair)) for pair in bad_pairs]
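This change is behavioural, not cosmetic: on Python 3 `filter()` returns a lazy filter object, which is always truthy, so the `if bad_pairs:` check below it would fire even when every constraint matched exactly one cube. The list comprehension restores the Python 2 list semantics. Roughly:

```python
pairs = [("constraint_a",), ("constraint_b",)]  # toy stand-ins, one cube per constraint

bad_pairs = filter(lambda pair: len(pair) != 1, pairs)
print(bool(bad_pairs))  # True on Python 3, even though nothing matched

bad_pairs = [pair for pair in pairs if len(pair) != 1]
print(bool(bad_pairs))  # False - the emptiness test now means what it says
```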
1 change: 1 addition & 0 deletions lib/iris/_concatenate.py
@@ -20,6 +20,7 @@
"""

from __future__ import (absolute_import, division, print_function)
+from six.moves import range, zip

from collections import defaultdict, namedtuple

3 changes: 2 additions & 1 deletion lib/iris/_constraints.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2010 - 2014, Met Office
+# (C) British Crown Copyright 2010 - 2015, Met Office
#
# This file is part of Iris.
#
@@ -20,6 +20,7 @@
"""

from __future__ import (absolute_import, division, print_function)
+from six.moves import range, zip

import collections
import operator
3 changes: 2 additions & 1 deletion lib/iris/_merge.py
@@ -23,6 +23,7 @@
"""

from __future__ import (absolute_import, division, print_function)
+from six.moves import map, range, zip

from collections import namedtuple, OrderedDict
from copy import deepcopy
@@ -1171,7 +1172,7 @@ def merge(self, unique=True):
self._report_duplicate(nd_indexes, group_by_nd_index)

# Generate group-depth merged cubes from the source-cubes.
-for level in xrange(group_depth):
+for level in range(group_depth):
# Stack up all the data from all of the relevant source
# cubes in a single biggus ArrayStack.
# If it turns out that all the source cubes already had
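`xrange` does not exist on Python 3. With `from six.moves import range` at the top of the module, `range(group_depth)` is `xrange` on Python 2 and the (equally lazy) built-in `range` on Python 3, so the loop is unchanged in behaviour. For example (assumes six is installed):

```python
from six.moves import range  # xrange on Python 2, the built-in range on Python 3

group_depth = 3
for level in range(group_depth):
    print("merging level", level)  # 0, 1, 2 on both Python lines
```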
7 changes: 4 additions & 3 deletions lib/iris/analysis/__init__.py
@@ -47,6 +47,7 @@
"""

from __future__ import (absolute_import, division, print_function)
+from six.moves import filter, range, zip

import collections

@@ -92,8 +93,8 @@ def _first_coord_w_cube(self):
as (cube, coord).

"""
-return filter(lambda cube_coord: cube_coord[1] is not None,
-              zip(self.cubes, self.coords))[0]
+return next(filter(lambda cube_coord: cube_coord[1] is not None,
+                   zip(self.cubes, self.coords)))

def __repr__(self):
# No exact repr, so a helpful string is given instead
@@ -1074,7 +1075,7 @@ def interp_order(length):
column_peaks.append(column[0])
continue

-tck = scipy.interpolate.splrep(range(column.size), column, k=k)
+tck = scipy.interpolate.splrep(np.arange(column.size), column, k=k)
npoints = column.size * 100
points = np.linspace(0, column.size - 1, npoints)
spline = scipy.interpolate.splev(points, tck)
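Two distinct issues are handled above. A Python 3 filter object is not subscriptable, so `filter(...)[0]` raises TypeError; `next(filter(...))` takes the first match on both versions. And `np.arange` hands `splrep` an explicit array of x positions rather than a lazy `range` object. An illustrative sketch with toy values:

```python
import numpy as np

cubes = ["cube_a", "cube_b"]   # hypothetical stand-ins for real cubes and coords
coords = [None, "height"]

# filter(...)[0] on Python 3: TypeError: 'filter' object is not subscriptable
first = next(filter(lambda cc: cc[1] is not None, zip(cubes, coords)))
print(first)  # ('cube_b', 'height')

column = np.array([1.0, 3.0, 2.0, 5.0])
x = np.arange(column.size)  # explicit array, safe to hand to scipy.interpolate.splrep
print(x)  # [0 1 2 3]
```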
7 changes: 4 additions & 3 deletions lib/iris/analysis/_interpolation.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2014, Met Office
+# (C) British Crown Copyright 2014 - 2015, Met Office
#
# This file is part of Iris.
#
@@ -17,6 +17,7 @@
"""A collection of helpers for interpolation."""

from __future__ import (absolute_import, division, print_function)
+from six.moves import range, zip

from collections import namedtuple
from itertools import product
@@ -500,7 +501,7 @@ def _points(self, sample_points, data, data_dims=None):
instance of the interpolated data.

"""
-dims = range(self._src_cube.ndim)
+dims = list(range(self._src_cube.ndim))
data_dims = data_dims or dims

if len(data_dims) != data.ndim:
@@ -514,7 +515,7 @@ def _points(self, sample_points, data, data_dims=None):
raise NotImplementedError(msg)

# Broadcast the data into the shape of the original cube.
-if data_dims != range(self._src_cube.ndim):
+if data_dims != list(range(self._src_cube.ndim)):
strides = list(data.strides)
for dim in range(self._src_cube.ndim):
if dim not in data_dims:
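The `list(range(...))` wrappers are needed because Python 3's `range()` returns its own sequence type: comparing it against a list is never equal, so `data_dims != range(ndim)` would always be True and the broadcasting branch would run even for full-dimension data. For instance:

```python
data_dims = [0, 1, 2]

print(data_dims != range(3))        # True on Python 3: a list never equals a range object
print(data_dims != list(range(3)))  # False: the comparison behaves as intended
```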
3 changes: 2 additions & 1 deletion lib/iris/analysis/_scipy_interpolate.py
@@ -1,5 +1,6 @@

from __future__ import (absolute_import, division, print_function)
+from six.moves import range, zip

import numpy as np

@@ -22,7 +23,7 @@ def _ndim_coords_from_arrays(points, ndim=None):
points = points[0]
if isinstance(points, tuple):
p = np.broadcast_arrays(*points)
-for j in xrange(1, len(p)):
+for j in range(1, len(p)):
if p[j].shape != p[0].shape:
raise ValueError(
"coordinate arrays do not have the same shape")
1 change: 1 addition & 0 deletions lib/iris/analysis/calculus.py
@@ -22,6 +22,7 @@
"""

from __future__ import (absolute_import, division, print_function)
+from six.moves import filter, zip

import re
import warnings
20 changes: 10 additions & 10 deletions lib/iris/analysis/cartography.py
@@ -20,9 +20,9 @@
"""

from __future__ import (absolute_import, division, print_function)
+from six.moves import zip

import copy
-import itertools
import warnings

import numpy as np
@@ -108,10 +108,10 @@ def rotate_pole(lons, lats, pole_lon, pole_lat):


def _get_lat_lon_coords(cube):
-lat_coords = filter(lambda coord: "latitude" in coord.name(),
-                    cube.coords())
-lon_coords = filter(lambda coord: "longitude" in coord.name(),
-                    cube.coords())
+lat_coords = [coord for coord in cube.coords()
+              if "latitude" in coord.name()]
+lon_coords = [coord for coord in cube.coords()
+              if "longitude" in coord.name()]
if len(lat_coords) > 1 or len(lon_coords) > 1:
raise ValueError(
"Calling _get_lat_lon_coords() with multiple lat or lon coords"
@@ -372,7 +372,7 @@ def area_weights(cube, normalize=False):
# Now we create an array of weights for each cell. This process will
# handle adding the required extra dimensions and also take care of
# the order of dimensions.
-broadcast_dims = filter(lambda x: x is not None, (lat_dim, lon_dim))
+broadcast_dims = [x for x in (lat_dim, lon_dim) if x is not None]
wshape = []
for idim, dim in zip((0, 1), (lat_dim, lon_dim)):
if dim is not None:
@@ -421,8 +421,8 @@ def cosine_latitude_weights(cube):

"""
# Find all latitude coordinates, we want one and only one.
-lat_coords = filter(lambda coord: "latitude" in coord.name(),
-                    cube.coords())
+lat_coords = [coord for coord in cube.coords()
+              if "latitude" in coord.name()]
if len(lat_coords) > 1:
raise ValueError("Multiple latitude coords are currently disallowed.")
try:
@@ -455,7 +455,7 @@ def cosine_latitude_weights(cube):

# Create weights for each grid point. This operation handles adding extra
# dimensions and also the order of the dimensions.
-broadcast_dims = filter(lambda x: x is not None, lat_dims)
+broadcast_dims = [x for x in lat_dims if x is not None]
wshape = []
for idim, dim in enumerate(lat_dims):
if dim is not None:
@@ -641,7 +641,7 @@ def project(cube, target_proj, nx=None, ny=None):

# Step through cube data, regrid onto desired projection and insert results
# in new_data array
-for index, ll_slice in itertools.izip(index_it, slice_it):
+for index, ll_slice in zip(index_it, slice_it):
# Regrid source data onto target grid
index = list(index)
index[xdim] = slice(None, None)
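Same family of fix as in `load_cubes`, but here the filtered results are also passed to `len()`, which a Python 3 filter object does not support, so the lambdas become list comprehensions. For example:

```python
coord_names = ["grid_latitude", "longitude", "time"]  # toy coordinate names

lat_coords = [name for name in coord_names if "latitude" in name]
print(len(lat_coords))  # 1, on both Python lines

try:
    print(len(filter(lambda name: "latitude" in name, coord_names)))
except TypeError as err:
    print("Python 3:", err)  # object of type 'filter' has no len()
```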
3 changes: 2 additions & 1 deletion lib/iris/analysis/geometry.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2010 - 2014, Met Office
+# (C) British Crown Copyright 2010 - 2015, Met Office
#
# This file is part of Iris.
#
@@ -23,6 +23,7 @@
"""

from __future__ import (absolute_import, division, print_function)
+from six.moves import range

import warnings

7 changes: 4 additions & 3 deletions lib/iris/analysis/interpolate.py
@@ -22,6 +22,7 @@
"""

from __future__ import (absolute_import, division, print_function)
+from six.moves import map, range, zip

import collections
import warnings
@@ -58,7 +59,7 @@ def _cartesian_sample_points(sample_points, sample_point_coord_names):

# Find lat and lon coord indices
i_lat = i_lon = None
-i_non_latlon = range(len(sample_point_coord_names))
+i_non_latlon = list(range(len(sample_point_coord_names)))
for i, name in enumerate(sample_point_coord_names):
if "latitude" in name:
i_lat = i
@@ -730,7 +731,7 @@ def __call__(self, requested_x):

r = self._interpolator(requested_x[ok])
# Reshape the properly formed array to put the interpolator.axis last i.e. dims 0, 1, 2 -> 0, 2, 1 if axis = 1
-axes = range(r.ndim)
+axes = list(range(r.ndim))
del axes[self._interpolator.axis]
axes.append(self._interpolator.axis)

@@ -748,7 +749,7 @@ def __call__(self, requested_x):
grad = (self.y[..., -1:] - self.y[..., -2:-1]) / (self.x[-1] - self.x[-2])
result[interpolator_result_index] = self.y[..., -1:] + (requested_x[gt] - self.x[-1]) * grad

-axes = range(len(interpolator_result_index))
+axes = list(range(len(interpolator_result_index)))
axes.insert(self._interpolator.axis, axes.pop(axes[-1]))
result = result.transpose(axes)

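Here the `list(range(...))` wrapping is required because the code mutates the result afterwards (`del axes[...]`, `axes.append(...)`, `axes.insert(...)`); a Python 3 range object is immutable and supports none of that. A minimal illustration:

```python
axes = list(range(4))  # [0, 1, 2, 3]
del axes[1]            # fine on a list
axes.append(1)
print(axes)            # [0, 2, 3, 1]

# On a bare range object the same operations fail on Python 3:
# del range(4)[1]  ->  TypeError: 'range' object doesn't support item deletion
```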
1 change: 1 addition & 0 deletions lib/iris/analysis/maths.py
@@ -20,6 +20,7 @@
"""

from __future__ import (absolute_import, division, print_function)
+from six.moves import filter

import warnings
import math
6 changes: 4 additions & 2 deletions lib/iris/analysis/trajectory.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2010 - 2014, Met Office
+# (C) British Crown Copyright 2010 - 2015, Met Office
#
# This file is part of Iris.
#
@@ -20,6 +20,7 @@
"""

from __future__ import (absolute_import, division, print_function)
+from six.moves import range

import math

@@ -76,7 +77,8 @@ def __init__(self, waypoints, sample_count=10):
self.sample_count = sample_count

# create line segments from the waypoints
-segments = [_Segment(self.waypoints[i], self.waypoints[i+1]) for i in range(len(self.waypoints) - 1)]
+segments = [_Segment(self.waypoints[i], self.waypoints[i+1])
+            for i in range(len(self.waypoints) - 1)]

# calculate our total length
self.length = sum([seg.length for seg in segments])
5 changes: 3 additions & 2 deletions lib/iris/aux_factory.py
@@ -20,6 +20,7 @@
"""

from __future__ import (absolute_import, division, print_function)
+from six.moves import map, range, zip

from abc import ABCMeta, abstractmethod, abstractproperty
import warnings
@@ -288,7 +289,7 @@ def _nd_points(coord, dims, ndim):
sorted_pairs = sorted(enumerate(dims), key=lambda pair: pair[1])
transpose_order = [pair[0] for pair in sorted_pairs]
points = coord._points
-if dims and transpose_order != range(len(dims)):
+if dims and transpose_order != list(range(len(dims))):
points = points.transpose(transpose_order)

# Expand dimensionality to be consistent with the Cube.
@@ -322,7 +323,7 @@ def _remap(self, dependency_dims, derived_dims):
# no transpose is needed.
if derived_dims:
keys = tuple(slice(None) if dim in derived_dims else 0 for
-dim in xrange(ndim))
+dim in range(ndim))
nd_points = nd_points[keys]
else:
# If no coord, treat value as zero.
12 changes: 7 additions & 5 deletions lib/iris/coord_categorisation.py
@@ -28,6 +28,7 @@
"""

from __future__ import (absolute_import, division, print_function)
+from six.moves import range

import calendar
import collections
@@ -216,7 +217,7 @@ def _months_in_season(season):
# Can't match the season, raise an error.
raise ValueError('unrecognised season: {!s}'.format(season))
m1 = m0 + len(season)
-return map(lambda month: (month % 12) + 1, range(m0, m1))
+return [(month % 12) + 1 for month in range(m0, m1)]


def _validate_seasons(seasons):
@@ -233,13 +234,13 @@ def _validate_seasons(seasons):
for season in seasons:
c.update(_months_in_season(season))
# Make a list of months that are not present...
-not_present = [calendar.month_abbr[month] for month in xrange(1, 13)
+not_present = [calendar.month_abbr[month] for month in range(1, 13)
if month not in c]
if not_present:
raise ValueError('some months do not appear in any season: '
'{!s}'.format(', '.join(not_present)))
# Make a list of months that appear multiple times...
-multi_present = [calendar.month_abbr[month] for month in xrange(1, 13)
+multi_present = [calendar.month_abbr[month] for month in range(1, 13)
if c[month] > 1]
if multi_present:
raise ValueError('some months appear in more than one season: '
@@ -258,8 +259,9 @@ def _month_year_adjusts(seasons):
month_year_adjusts = [None, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
for season in seasons:
months = _months_in_season(season)
-for month in filter(lambda m: m > months[-1], months):
-    month_year_adjusts[month] = 1
+for month in months:
+    if month > months[-1]:
+        month_year_adjusts[month] = 1
return month_year_adjusts


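`_months_in_season` used to return whatever `map()` produced, which on Python 3 is a single-use iterator; the callers in this module index the result (e.g. `months[-1]`) while iterating over it, so a real list is required. A simplified sketch of the new behaviour (the helper below is a stand-in, not the Iris function itself):

```python
def months_in_season(m0, m1):
    # List comprehension: indexable and reusable on Python 2 and 3.
    return [(month % 12) + 1 for month in range(m0, m1)]

months = months_in_season(11, 14)  # roughly what 'djf' resolves to
print(months)      # [12, 1, 2]
print(months[-1])  # 2; a Python 3 map object would raise TypeError here
```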