Misc. typos (OSGeo#668)
Found via `codespell -q 3 -I ../gdal-word-whitelist.txt`, where the whitelist consists of:
```
als
ang
ans
dum
geometrie
iff
lod
merget
nd
objext
oder
oposition
pres
poiter
posession
repid
ressource
sinc
som
te
templat
titel
```
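
For anyone who wants to rerun a similar sweep, here is a minimal sketch (assuming codespell is installed and that the whitelist above is saved one word per line as `../gdal-word-whitelist.txt`, relative to the repository checkout, matching the command above):

```bash
# Report suspected typos, ignoring the whitelisted identifiers/abbreviations
# (-q 3 reduces codespell's warning noise, -I supplies the ignore-words list).
codespell -q 3 -I ../gdal-word-whitelist.txt

# Optionally let codespell apply its suggested fixes in place (-w),
# then review the result before committing.
codespell -q 3 -I ../gdal-word-whitelist.txt -w
git diff
```

The whitelist is needed because entries such as `iff`, `lod`, or `nd` are legitimate identifiers or abbreviations in the GDAL sources that codespell would otherwise flag as misspellings.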
luzpaz authored and rouault committed Jun 7, 2018
1 parent 877b237 commit 6ef1319
Showing 90 changed files with 153 additions and 153 deletions.
2 changes: 1 addition & 1 deletion .github/ISSUE_TEMPLATE.md
@@ -8,7 +8,7 @@ for new features.
## Expected behavior and actual behavior.

For example: I expected to be able to open this raster file (with a link to
-the raster file, or it as an attachement) and it returns an error message
+the raster file, or it as an attachment) and it returns an error message
instead.

## Steps to reproduce the problem.
6 changes: 3 additions & 3 deletions CONTRIBUTING.md
@@ -28,7 +28,7 @@ Git workflows with GDAL
=======================

This is not a git tutorial or reference manual by any means. This just collects
-a few best practice for git usage for GDAL developement.
+a few best practice for git usage for GDAL development.

Commit message
--------------
@@ -82,7 +82,7 @@ git fetch origin
git rebase origin/master
# At end of your work, make sure history is reasonable by folding non
-# significant commits into a consistant set
+# significant commits into a consistent set
git rebase -i master (use fixup for example to merge several commits together)
# push your branch
@@ -92,7 +92,7 @@ From GitHub UI, issue a pull request

If the pull request discussion or Travis-CI/AppVeyor checks require changes,
commit locally and push. To get a reasonable history, you may need to
-```git rebase -i master```, in whish case you will have to force-push your
+```git rebase -i master```, in which case you will have to force-push your
branch with ```git push -f my_user_name my_new_feature_branch```


2 changes: 1 addition & 1 deletion autotest/cpp/tut/tut_exception.hpp
@@ -93,7 +93,7 @@ struct failure : public tut_error
};

/**
-* Exception to be throwed when test desctructor throwed an exception.
+* Exception to be thrown when test destructor threw an exception.
*/
struct warning : public tut_error
{
4 changes: 2 additions & 2 deletions autotest/gcore/tiff_ovr.py
@@ -59,7 +59,7 @@ def tiff_ovr_check(src_ds):
return 'fail'

if ovr_band.Checksum() != 1087:
-msg = 'overview wrong checkum: band %d, overview 0, checksum = %d,' % (i, ovr_band.Checksum())
+msg = 'overview wrong checksum: band %d, overview 0, checksum = %d,' % (i, ovr_band.Checksum())
gdaltest.post_reason(msg)
return 'fail'

@@ -70,7 +70,7 @@ def tiff_ovr_check(src_ds):
return 'fail'

if ovr_band.Checksum() != 328:
-msg = 'overview wrong checkum: band %d, overview 1, checksum = %d,' % (i, ovr_band.Checksum())
+msg = 'overview wrong checksum: band %d, overview 1, checksum = %d,' % (i, ovr_band.Checksum())
gdaltest.post_reason(msg)
return 'fail'
return 'success'
4 changes: 2 additions & 2 deletions autotest/gcore/tiff_write.py
@@ -921,7 +921,7 @@ def tiff_write_20():
md = new_ds.GetMetadata()
for item in values:
if item[0] not in md:
-gdaltest.post_reason('Couldnt find tag %s' % (item[0]))
+gdaltest.post_reason('Could not find tag %s' % (item[0]))
return 'fail'

if md[item[0]] != item[1]:
@@ -5614,7 +5614,7 @@ def tiff_write_128():

gdaltest.tiff_drv.Delete('/vsimem/tiff_write_128.tif')

-# Try with explicit CMYK photometric interpreation
+# Try with explicit CMYK photometric interpretation
old_val = gdal.GetConfigOption('GDAL_PAM_ENABLED')
gdal.SetConfigOption('GDAL_PAM_ENABLED', 'NO')
ds = gdaltest.tiff_drv.CreateCopy('/vsimem/tiff_write_128.tif', src_ds, options=['COMPRESS=JPEG', 'PHOTOMETRIC=CMYK'])
4 changes: 2 additions & 2 deletions autotest/gdrivers/data/load_postgisraster_test_data.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# If you are working in a Linux/Mac enviroment, execute this script with "sh load_postgisraster_test_data"
+# If you are working in a Linux/Mac environment, execute this script with "sh load_postgisraster_test_data"
#
# TODO: a Python version of this script, to have a system-independent script
#
@@ -15,7 +15,7 @@
# configuration file (pg_hba.conf) to allow the user "trust" authentication
# method. See http://www.postgresql.org/docs/8.4/interactive/auth-pg-hba-conf.html

-# CHANGE THIS TO MATCH YOUR ENVIROMENT
+# CHANGE THIS TO MATCH YOUR ENVIRONMENT

SQL_OUTPUT_FILES_PATH=/tmp/gdal-autotest-pgraster

4 changes: 2 additions & 2 deletions autotest/gdrivers/data/test_validate_jp2/byte_corrupted.xml
@@ -124,9 +124,9 @@
<gml:axisName>y</gml:axisName>
<gml:origin>
<!--<gml:Point gml:id="P0001" srsName="urn:ogc:def:crs:EPSG::26711">-->
-<gml:Point gml:id="P0001" srsName="urn:ogc:def:crs:EPSG::32631"> <!-- ERROR[GENERAL]: Inconsistant SRS between ... -->
+<gml:Point gml:id="P0001" srsName="urn:ogc:def:crs:EPSG::32631"> <!-- ERROR[GENERAL]: Inconsistent SRS between ... -->
<!--<gml:pos>440750 3751290</gml:pos>-->
-<gml:pos>-440750 -3751290</gml:pos> <!-- ERROR[GENERAL]: Inconsistant geotransform between GeoJP2 ((440720.0, 60.0, 0.0, 3751320.0, 0.0, -60.0)) and GMLJP2 ((-440780.0, 60.0, 0.0, -3751260.0, 0.0, -60.0)) -->
+<gml:pos>-440750 -3751290</gml:pos> <!-- ERROR[GENERAL]: Inconsistent geotransform between GeoJP2 ((440720.0, 60.0, 0.0, 3751320.0, 0.0, -60.0)) and GMLJP2 ((-440780.0, 60.0, 0.0, -3751260.0, 0.0, -60.0)) -->
</gml:Point>
</gml:origin>
<gml:offsetVector srsName="urn:ogc:def:crs:EPSG::32631">60 0</gml:offsetVector>
@@ -145,7 +145,7 @@
<gml:origin>
<gml:Point gml:id="P0001" srsName="urn:ogc:def:crs:EPSG::26711">
<!--<gml:pos>440750 3751290</gml:pos>-->
-<gml:pos>40750 3751290</gml:pos> <!-- 'ERROR[INSPIRE_TG, Conformance class A.8.8]: Inconsistant geotransform between OrthoImagery ((440720.0, 60.0, 0.0, 3751320.0, 0.0, -60.0)) and GMLJP2/GeoJP2 ((40720.0, 60.0, 0.0, 3751320.0, 0.0, -60.0))', -->
+<gml:pos>40750 3751290</gml:pos> <!-- 'ERROR[INSPIRE_TG, Conformance class A.8.8]: Inconsistent geotransform between OrthoImagery ((440720.0, 60.0, 0.0, 3751320.0, 0.0, -60.0)) and GMLJP2/GeoJP2 ((40720.0, 60.0, 0.0, 3751320.0, 0.0, -60.0))', -->
</gml:Point>
</gml:origin>
<gml:offsetVector srsName="urn:ogc:def:crs:EPSG::26711">60 0</gml:offsetVector>
2 changes: 1 addition & 1 deletion autotest/gdrivers/grib.py
@@ -1484,7 +1484,7 @@ def grib_grib2_write_data_encodings_warnings_and_errors():
return 'success'

###############################################################################
-# Test writing temperatures with automatic Celcius -> Kelvin conversion
+# Test writing temperatures with automatic Celsius -> Kelvin conversion


def grib_grib2_write_temperatures():
2 changes: 1 addition & 1 deletion autotest/gdrivers/pds4.py
@@ -305,7 +305,7 @@ def pds4_9():
ds = None

filename = '/vsimem/out.xml'
-# Test copy of all specialConstants and overide noData
+# Test copy of all specialConstants and override noData
for frmt in ['RAW', 'GEOTIFF']:
with hide_substitution_warnings_error_handler():
gdal.Translate(filename, 'data/byte_pds4.xml', format='PDS4',
4 changes: 2 additions & 2 deletions autotest/gdrivers/rmf.py
@@ -189,7 +189,7 @@ def rmf_11():
return 'fail'

if ovr_band.Checksum() != ovr_checksum[i]:
-msg = 'overview wrong checkum: overview %d, checksum = %d,' % \
+msg = 'overview wrong checksum: overview %d, checksum = %d,' % \
(i, ovr_band.Checksum())
gdaltest.post_reason(msg)
return 'fail'
@@ -293,7 +293,7 @@ def rmf_build_ov(source, testid, options, ov_sizes, crs, reopen=False, pass_coun
return 'fail'

if ovr_band.Checksum() != crs[iOverview][iBand]:
-msg = 'overview wrong checkum: band %d, overview %d, checksum = %d,' % \
+msg = 'overview wrong checksum: band %d, overview %d, checksum = %d,' % \
(iBand, iOverview, ovr_band.Checksum())
gdaltest.post_reason(msg)
return 'fail'
4 changes: 2 additions & 2 deletions autotest/ogr/ogr_cad.py
@@ -316,7 +316,7 @@ def ogr_cad_4():
feat = gdaltest.cad_layer.GetNextFeature()

if ogrtest.check_feature_geometry(feat, 'POINT (50 50 0)'):
-gdaltest.post_reason('got feature which doesnot fit expectations.')
+gdaltest.post_reason('got feature which does not fit expectations.')
return 'fail'

gdaltest.cad_ds = None
@@ -345,7 +345,7 @@ def ogr_cad_5():
feat = gdaltest.cad_layer.GetNextFeature()

if ogrtest.check_feature_geometry(feat, 'LINESTRING (50 50 0,100 100 0)'):
-gdaltest.post_reason('got feature which doesnot fit expectations.')
+gdaltest.post_reason('got feature which does not fit expectations.')
return 'fail'

gdaltest.cad_ds = None
4 changes: 2 additions & 2 deletions autotest/ogr/ogr_geojson.py
@@ -3732,7 +3732,7 @@ def ogr_geojson_62():
gdaltest.post_reason('failure')
return 'fail'

-# crs type=EPSG (not even documented in GJ2008 spec!) tests. Just for coverage completness
+# crs type=EPSG (not even documented in GJ2008 spec!) tests. Just for coverage completeness
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"EPSG" }, "features":[] }""")

gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"EPSG", "properties":null }, "features":[] }""")
@@ -3769,7 +3769,7 @@ def ogr_geojson_62():
with gdaltest.error_handler():
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"link", "properties":{"href": "1"} }, "features":[] }""")

-# crs type=OGC (not even documented in GJ2008 spec!) tests. Just for coverage completness
+# crs type=OGC (not even documented in GJ2008 spec!) tests. Just for coverage completeness
gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"OGC" }, "features":[] }""")

gdal.OpenEx("""{ "type": "FeatureCollection", "crs": { "type":"OGC", "properties":null }, "features":[] }""")
2 changes: 1 addition & 1 deletion autotest/ogr/ogr_ili.py
@@ -337,7 +337,7 @@ def ogr_interlis1_7():
feat = lyr.GetNextFeature()

# Interlis 1 Encoding is ISO 8859-1 (Latin1)
-# Pyton source code is UTF-8 encoded
+# Python source code is UTF-8 encoded
field_values = [0, 'äöü', 'ÄÖÜ', '', 1]

if feat.GetFieldCount() != len(field_values):
2 changes: 1 addition & 1 deletion autotest/ogr/ogr_kml.py
@@ -928,7 +928,7 @@ def ogr_kml_read_placemark_with_kml_prefix():
return 'success'

###############################################################################
-# Test reading KML with dumplicated folder name
+# Test reading KML with duplicated folder name


def ogr_kml_read_duplicate_folder_name():
2 changes: 1 addition & 1 deletion autotest/ogr/ogr_libkml.py
@@ -2104,7 +2104,7 @@ def ogr_libkml_read_placemark_with_kml_prefix():
return 'success'

###############################################################################
-# Test reading KML with dumplicated folder name
+# Test reading KML with duplicated folder name


def ogr_libkml_read_duplicate_folder_name():
2 changes: 1 addition & 1 deletion autotest/ogr/ogr_shape.py
@@ -2131,7 +2131,7 @@ def ogr_shape_50():
return 'success'

###############################################################################
-# Test that we can add a field when there's no dbf file initialy
+# Test that we can add a field when there's no dbf file initially


def ogr_shape_51():
2 changes: 1 addition & 1 deletion autotest/utilities/test_ogr2ogr.py
@@ -2745,7 +2745,7 @@ def check_identity_transformation(x, y, srid):
ds.Destroy()

if ok:
-# Now, transforming SHP to SHP will have a different definition of the SRS (EPSG:srid) which comes from the previouly saved .prj file
+# Now, transforming SHP to SHP will have a different definition of the SRS (EPSG:srid) which comes from the previously saved .prj file
# For angular units in degrees the .prj is saved with greater precision than the internally used value.
# We perform this additional transformation to exercise the case of units defined with different precision
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + " tmp/output_point2.shp tmp/output_point.shp -t_srs EPSG:%(srid)d" % locals())
18 changes: 9 additions & 9 deletions gdal/NEWS
@@ -10,7 +10,7 @@ Trac tickets. GitHub tickets are explicitly indicated with github #XXXX
- RDA: DigitalGlobe Raster Data Access (read-only)
* New OGR drivers:
- MVT: add read/write driver for Mapbox Vector Tile standalone files or tilesets
-- ESRIJson and TopoJSON: read-only, splitted from existing code of the GeoJSON driver
+- ESRIJson and TopoJSON: read-only, split from existing code of the GeoJSON driver
- WFS3: *experimental* read-only driver
* RFC 68: Make C++11 a build requirement
https://trac.osgeo.org/gdal/wiki/rfc68_cplusplus11
@@ -1611,7 +1611,7 @@ Shapefile driver:
* auto-repack by default at dataset closing and FlushCache()/SyncToDisk() time. Controlled by AUTO_REPACK open and layer creation options (that default to YES)
* generate .dbf end-of-file 0x1A character by default. Add DBF_EOF_CHAR layer creation options / open options to control that behaviour
* writing: use strerrno() for better error messages (QGIS #13468)
-* change REPACK implementation on Windows to be robust to remaining file descriptors openend on the .shp/.shx/.dbf (#6672, QGIS #15570)
+* change REPACK implementation on Windows to be robust to remaining file descriptors opened on the .shp/.shx/.dbf (#6672, QGIS #15570)
* Fix issue in DBFCloneEmpty() one a one field DBF not yet written to disk
* add call to AutoIdentifyEPSG() when reading a .prj
* support reading .dbf with substantial padding after last field definition.
@@ -3582,7 +3582,7 @@ GRASS driver:
* fix compilation issues for GRASS 7

GRIB driver:
-* display temperature unit as deg Celcius in metadata (#3606)
+* display temperature unit as deg Celsius in metadata (#3606)

GTiff driver:
* when compiling against internal libtiff, in read-only mode, optimization to
@@ -3592,7 +3592,7 @@ GTiff driver:
* fix potential issues in gt_citation.cpp / CheckUTM()
* upgrade internal libtiff to latest CVS
* implement reading and writing of ICC profiles (#5246)
-* make SetColorInterpretation() round-trip with GetColorInterpreation();
+* make SetColorInterpretation() round-trip with GetColorInterpretation();
read color interpretation from PAM if it exists (overrides internal tiff color interpretation);
set TIFFTAG_PHOTOMETRIC=PHOTOMETRIC_RGB if calling SetColorInterpretation() with R,G,B and no explicit PHOTOMETRIC creation option defined
* gt_wkt_srs.cpp: fix compilation with external libgeotiff. The file is dependent of quite a few CPL stuff, don't try to pretend otherwise
@@ -3996,7 +3996,7 @@ OSM driver:
* support relations with more than 2000 members in a OSM XML file (#5055)
* make the driver work with PBF files produced by osmconvert.
* osmconf.ini: report the waterway attribute for the lines layer (#5056)
-* add an option in osmconf.ini to enable creating a 'all_tags' field, combining both fields specificaly identified, and other tags
+* add an option in osmconf.ini to enable creating a 'all_tags' field, combining both fields specifically identified, and other tags
* always use quoting of key/values in other_tags field (#5096)
* use alternative implementation of FindNode() that is a bit more efficient when process is CPU-bound
* fix issue with attribute filtering
@@ -6922,7 +6922,7 @@ Utilities :
- Add -separate, -allow_projection_difference, -te, -tr, -q, -addalpha options
- Add -srcnodata and -vrtnodata options (#3254)
- Add -hidenodata option (#3327)
-- Avoid accidental overwriting of a non VRT dataset due to revered filename order
+- Avoid accidental overwriting of a non VRT dataset due to reversed filename order
- Fix -resolution lowest/highest (#3198)
* gdaladdo: add -clean option (#2915)
* gdaladdo: add -q option
@@ -7139,7 +7139,7 @@ JP2KAK (Kakadu) Driver :
Fixed _WriteTile() lossless 16bit case to avoid improper 32K offset.
Added support for NBITS image structure metadata, and creation option.
* Added logic to limit tiles to 64K due to jpeg2000 limitation. (ESRI Merge)
-* Fix offseting of 16U buf32 data (#3027)
+* Fix offsetting of 16U buf32 data (#3027)
* Support 16u/16s imagery through DirectRasterIO interface (#3049)
* Support external overviews as an override to internal overviews
* Rework jp2kak support to use natural kakadu builds (Windows build)
@@ -7894,7 +7894,7 @@ L1B driver:
* Added support for NOAA-18(N) and METOP-2 datasets; tiny code refactoring.
* L1B : add auto guess of data format when it is 2 spaces or empty string
* The GAC GCPs are not tied to the center of pixel.
-* Serious code rewriting in order to read datasets withour archive header
+* Serious code rewriting in order to read datasets without archive header

MEM driver:
* Avoid failure when doing mem_driver->Create('MEM:::')
@@ -8016,7 +8016,7 @@ VRT driver:
* Use VSIF Large API in VRTDataset::Open to fix #1070
* recover from failure to create transformer (#2240)
* Added LUT based transformation support to the VRTComplexSource
-* Extend the output of <SimpleSource> in a forward and backword compatibly way, and make use of GDALProxyPoolDataset when possible
+* Extend the output of <SimpleSource> in a forward and backward compatible way, and make use of GDALProxyPoolDataset when possible
* Add the <ColorTableComponent> element to <ComplexSource> to do color table expansion in the VRT
* Fix failure when attempting to read a warped VRT made from a 3-band dataset with -dstalpha option (#2502)
* In VRTDerivedRasterBand::IRasterIO() don't call RasterIO() on sources with 0,0 for nPixelSpace and nLineSpace as most sources, except VRTSimpleSource, don't translate them.
2 changes: 1 addition & 1 deletion gdal/Vagrantfile
@@ -19,7 +19,7 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
# these options are also specified by environment variables;
# VAGRANT_HTTP_PROXY, VAGRANT_HTTPS_PROXY, VAGRANT_FTP_PROXY
# VAGRANT_NO_PROXY, VAGRANT_SVN_PROXY, VAGRANT_GIT_PROXY
-# if you want to set these on Vagrantfile, edit followings.
+# if you want to set these on Vagrantfile, edit the following:
if Vagrant.has_plugin?("vagrant-proxyconf")
config.proxy.enabled = false # true|false
#config.proxy.http = "http://192.168.0.2:3128"
2 changes: 1 addition & 1 deletion gdal/alg/gdalrasterize.cpp
@@ -721,7 +721,7 @@ CPLErr GDALRasterizeGeometries( GDALDatasetH hDS,
if( eOptim == GRO_Auto )
{
eOptim = GRO_Raster;
-// TODO make more tests with various inputs/outputs to ajust the parameters
+// TODO make more tests with various inputs/outputs to adjust the parameters
if( nYBlockSize > 1 && nGeomCount > 10000 && (poBand->GetXSize() * static_cast<long long>(poBand->GetYSize()) / nGeomCount > 50) )
{
eOptim = GRO_Vector;
8 changes: 4 additions & 4 deletions gdal/apps/gdalwarp_lib.cpp
@@ -2876,9 +2876,9 @@ TransformCutlineToSource( GDALDatasetH hSrcDS, OGRGeometryH hCutline,
reinterpret_cast<OGRGeometry*>(hMultiPolygon) );

CPLPushErrorHandler(CPLQuietErrorHandler);
-const bool bWasValidInitialy = LooseValidateCutline(hMultiPolygon);
+const bool bWasValidInitially = LooseValidateCutline(hMultiPolygon);
CPLPopErrorHandler();
-if( !bWasValidInitialy )
+if( !bWasValidInitially )
{
CPLDebug("WARP", "Cutline is not valid after initial reprojection");
char *pszWKT = nullptr;
@@ -2893,7 +2893,7 @@ TransformCutlineToSource( GDALDatasetH hSrcDS, OGRGeometryH hCutline,
const char* pszDensifyCutline = CPLGetConfigOption("GDALWARP_DENSIFY_CUTLINE", "YES");
if( EQUAL(pszDensifyCutline, "ONLY_IF_INVALID") )
{
-bDensify = ( OGRGeometryFactory::haveGEOS() && !bWasValidInitialy );
+bDensify = ( OGRGeometryFactory::haveGEOS() && !bWasValidInitially );
}
else if( CSLFetchNameValue( *ppapszWarpOptions, "CUTLINE_BLEND_DIST" ) != nullptr &&
CPLGetConfigOption("GDALWARP_DENSIFY_CUTLINE", nullptr) == nullptr )
@@ -2935,7 +2935,7 @@ TransformCutlineToSource( GDALDatasetH hSrcDS, OGRGeometryH hCutline,
{
const double dfMaxLengthInPixels = GetMaximumSegmentLength(
reinterpret_cast<OGRGeometry*>(hMultiPolygon) );
-if( bWasValidInitialy )
+if( bWasValidInitially )
{
// In some cases, the densification itself results in a reprojected
// invalid polygon due to the non-linearity of RPC DEM transformation,
