This commit is contained in:
2025-01-26 19:24:23 -08:00
parent 32cd60e92b
commit d1dde0dbc6
4155 changed files with 29170 additions and 216373 deletions

View File

@@ -1,27 +1,32 @@
"""Vectorized vector I/O using OGR."""
try:
# we try importing shapely, to ensure it is imported (and it can load its
# own GEOS copy) before we load GDAL and its linked GEOS
import shapely # noqa
import shapely.geos # noqa
import shapely
import shapely.geos # noqa: F401
except Exception:
pass
from pyogrio._version import get_versions
from pyogrio.core import (
list_drivers,
__gdal_geos_version__,
__gdal_version__,
__gdal_version_string__,
detect_write_driver,
get_gdal_config_option,
get_gdal_data_path,
list_drivers,
list_layers,
read_bounds,
read_info,
set_gdal_config_options,
get_gdal_config_option,
get_gdal_data_path,
__gdal_version__,
__gdal_version_string__,
__gdal_geos_version__,
vsi_listtree,
vsi_rmtree,
vsi_unlink,
)
from pyogrio.geopandas import read_dataframe, write_dataframe
from pyogrio._version import get_versions
from pyogrio.raw import open_arrow, read_arrow, write_arrow
__version__ = get_versions()["version"]
del get_versions
@@ -35,7 +40,13 @@ __all__ = [
"set_gdal_config_options",
"get_gdal_config_option",
"get_gdal_data_path",
"open_arrow",
"read_arrow",
"read_dataframe",
"vsi_listtree",
"vsi_rmtree",
"vsi_unlink",
"write_arrow",
"write_dataframe",
"__gdal_version__",
"__gdal_version_string__",

View File

@@ -1,6 +1,6 @@
from packaging.version import Version
from pyogrio.core import __gdal_version__, __gdal_geos_version__
from pyogrio.core import __gdal_geos_version__, __gdal_version__
# detect optional dependencies
try:
@@ -8,6 +8,11 @@ try:
except ImportError:
pyarrow = None
try:
import pyproj
except ImportError:
pyproj = None
try:
import shapely
except ImportError:
@@ -24,12 +29,18 @@ except ImportError:
pandas = None
HAS_ARROW_API = __gdal_version__ >= (3, 6, 0) and pyarrow is not None
HAS_ARROW_API = __gdal_version__ >= (3, 6, 0)
HAS_ARROW_WRITE_API = __gdal_version__ >= (3, 8, 0)
HAS_PYARROW = pyarrow is not None
HAS_PYPROJ = pyproj is not None
HAS_GEOPANDAS = geopandas is not None
PANDAS_GE_15 = pandas is not None and Version(pandas.__version__) >= Version("1.5.0")
PANDAS_GE_20 = pandas is not None and Version(pandas.__version__) >= Version("2.0.0")
PANDAS_GE_22 = pandas is not None and Version(pandas.__version__) >= Version("2.2.0")
GDAL_GE_38 = __gdal_version__ >= (3, 8, 0)
HAS_GDAL_GEOS = __gdal_geos_version__ is not None

View File

@@ -4,13 +4,11 @@
# adapted from Fiona: https://github.com/Toblerity/Fiona/pull/875
from contextlib import contextmanager
import logging
import os
from pathlib import Path
import platform
import sys
from contextlib import contextmanager
from pathlib import Path
log = logging.getLogger(__name__)
log.addHandler(logging.NullHandler())
@@ -29,10 +27,10 @@ except ImportError:
gdal_dll_dir = None
if platform.system() == "Windows" and sys.version_info >= (3, 8):
if platform.system() == "Windows":
# if loading of extension modules fails, search for gdal dll directory
try:
import pyogrio._io # NOQA
import pyogrio._io # noqa: F401
except ImportError:
for path in os.getenv("PATH", "").split(os.pathsep):

View File

@@ -1,4 +0,0 @@
# Declarations for the CPL error-handling helpers implemented in _err.pyx.

# Return the pending CPL error as a Python exception object, or None.
cdef object exc_check()
# Wrap an int (CPLErr / OGRErr) return value; raises if an error is pending.
cdef int exc_wrap_int(int retval) except -1
# Wrap an OGRErr return value that does not use the CPL error stack.
cdef int exc_wrap_ogrerr(int retval) except -1
# Wrap a pointer return value; raises if NULL or an error is pending.
cdef void *exc_wrap_pointer(void *ptr) except NULL

View File

@@ -1,246 +0,0 @@
# ported from fiona::_err.pyx
from enum import IntEnum
import warnings
from pyogrio._ogr cimport (
CE_None, CE_Debug, CE_Warning, CE_Failure, CE_Fatal, CPLErrorReset,
CPLGetLastErrorType, CPLGetLastErrorNo, CPLGetLastErrorMsg, OGRErr,
CPLErr, CPLErrorHandler, CPLDefaultErrorHandler, CPLPushErrorHandler)
# CPL Error types as an enum.
class GDALError(IntEnum):
    """Severity levels mirroring GDAL's CPLErr (cpl_error.h)."""
    none = CE_None
    debug = CE_Debug
    warning = CE_Warning
    failure = CE_Failure
    fatal = CE_Fatal
class CPLE_BaseError(Exception):
    """Base class for errors raised from the GDAL/OGR CPL error stack.

    For internal use within Cython only.
    """

    def __init__(self, error, errno, errmsg):
        # error: CPLErr severity, errno: CPL error number, errmsg: message text
        self.error, self.errno, self.errmsg = error, errno, errmsg

    def __str__(self):
        return self.__unicode__()

    def __unicode__(self):
        return u"{}".format(self.errmsg)

    @property
    def args(self):
        # Expose (error, errno, errmsg) through the standard Exception.args slot.
        return (self.error, self.errno, self.errmsg)
# Concrete exception classes, one per CPL error number (mapped in
# exception_map below). All inherit the (error, errno, errmsg)
# constructor from CPLE_BaseError.


class CPLE_AppDefinedError(CPLE_BaseError):
    pass


class CPLE_OutOfMemoryError(CPLE_BaseError):
    pass


class CPLE_FileIOError(CPLE_BaseError):
    pass


class CPLE_OpenFailedError(CPLE_BaseError):
    pass


class CPLE_IllegalArgError(CPLE_BaseError):
    pass


class CPLE_NotSupportedError(CPLE_BaseError):
    pass


class CPLE_AssertionFailedError(CPLE_BaseError):
    pass


class CPLE_NoWriteAccessError(CPLE_BaseError):
    pass


class CPLE_UserInterruptError(CPLE_BaseError):
    pass


class ObjectNullError(CPLE_BaseError):
    pass


class CPLE_HttpResponseError(CPLE_BaseError):
    pass


class CPLE_AWSBucketNotFoundError(CPLE_BaseError):
    pass


class CPLE_AWSObjectNotFoundError(CPLE_BaseError):
    pass


class CPLE_AWSAccessDeniedError(CPLE_BaseError):
    pass


class CPLE_AWSInvalidCredentialsError(CPLE_BaseError):
    pass


class CPLE_AWSSignatureDoesNotMatchError(CPLE_BaseError):
    pass


class NullPointerError(CPLE_BaseError):
    """
    Returned from exc_wrap_pointer when a NULL pointer is passed, but no GDAL
    error was raised.
    """
    pass
# Map of GDAL error numbers to the Python exceptions.
# Error numbers not listed here fall back to CPLE_BaseError in exc_check().
exception_map = {
    1: CPLE_AppDefinedError,
    2: CPLE_OutOfMemoryError,
    3: CPLE_FileIOError,
    4: CPLE_OpenFailedError,
    5: CPLE_IllegalArgError,
    6: CPLE_NotSupportedError,
    7: CPLE_AssertionFailedError,
    8: CPLE_NoWriteAccessError,
    9: CPLE_UserInterruptError,
    10: ObjectNullError,

    # error numbers 11-16 are introduced in GDAL 2.1. See
    # https://github.com/OSGeo/gdal/pull/98.
    11: CPLE_HttpResponseError,
    12: CPLE_AWSBucketNotFoundError,
    13: CPLE_AWSObjectNotFoundError,
    14: CPLE_AWSAccessDeniedError,
    15: CPLE_AWSInvalidCredentialsError,
    16: CPLE_AWSSignatureDoesNotMatchError
}
cdef inline object exc_check():
    """Checks GDAL error stack for fatal or non-fatal errors

    Returns
    -------
    An Exception, SystemExit, or None
    """
    cdef const char *msg_c = NULL

    err_type = CPLGetLastErrorType()
    err_no = CPLGetLastErrorNo()
    err_msg = CPLGetLastErrorMsg()

    if err_msg == NULL:
        msg = "No error message."
    else:
        # Reformat messages.
        msg_b = err_msg
        msg = msg_b.decode('utf-8')
        msg = msg.replace("`", "'")
        msg = msg.replace("\n", " ")

    # err_type 3 == CE_Failure: translate to the mapped Python exception and
    # reset the stack so the same error is not reported twice.
    if err_type == 3:
        CPLErrorReset()
        return exception_map.get(
            err_no, CPLE_BaseError)(err_type, err_no, msg)

    # err_type 4 == CE_Fatal: GDAL is about to abort; surface as SystemExit.
    if err_type == 4:
        return SystemExit("Fatal error: {0}".format((err_type, err_no, msg)))

    else:
        return
cdef void *exc_wrap_pointer(void *ptr) except NULL:
    """Wrap a GDAL/OGR function that returns GDALDatasetH etc (void *)

    Raises an exception if a non-fatal error has been set or if pointer is NULL.
    """
    if ptr == NULL:
        exc = exc_check()
        if exc:
            raise exc
        else:
            # null pointer was passed, but no error message from GDAL
            raise NullPointerError(-1, -1, "NULL pointer error")
    return ptr
cdef int exc_wrap_int(int err) except -1:
    """Wrap a GDAL/OGR function that returns CPLErr or OGRErr (int)

    Raises an exception if a non-fatal error has been set.

    Copied from Fiona (_err.pyx).
    """
    # Any nonzero return code is treated as an error.
    if err:
        exc = exc_check()
        if exc:
            raise exc
        else:
            # no error message from GDAL
            raise CPLE_BaseError(-1, -1, "Unspecified OGR / GDAL error")
    return err
cdef int exc_wrap_ogrerr(int err) except -1:
    """Wrap a function that returns OGRErr (int) but does not use the
    CPL error stack.

    Adapted from Fiona (_err.pyx).
    """
    # Any nonzero OGRErr is an error; severity 3 corresponds to CE_Failure.
    if err != 0:
        raise CPLE_BaseError(3, err, f"OGR Error code {err}")
    return err
cdef void error_handler(CPLErr err_class, int err_no, const char* err_msg) nogil:
    """Custom CPL error handler to match the Python behaviour.

    Generally we want to suppress error printing to stderr (behaviour of the
    default GDAL error handler) because we already raise a Python exception
    that includes the error message.
    """
    if err_class == CE_Fatal:
        # If the error class is CE_Fatal, we want to have a message issued
        # because the CPL support code does an abort() before any exception
        # can be generated
        CPLDefaultErrorHandler(err_class, err_no, err_msg)
        return

    elif err_class == CE_Failure:
        # For Failures, do nothing as those are explicitly caught
        # with error return codes and translated into Python exceptions
        return

    elif err_class == CE_Warning:
        # warnings.warn is Python-level code, so the GIL must be re-acquired
        with gil:
            msg_b = err_msg
            msg = msg_b.decode('utf-8')
            warnings.warn(msg, RuntimeWarning)
        return

    # Fall back to the default handler for non-failure messages since
    # they won't be translated into exceptions.
    CPLDefaultErrorHandler(err_class, err_no, err_msg)
def _register_error_handler():
    # Install the custom CPL handler so GDAL errors/warnings follow Python
    # semantics instead of being printed to stderr.
    CPLPushErrorHandler(<CPLErrorHandler>error_handler)

View File

@@ -1,4 +0,0 @@
from pyogrio._ogr cimport *

# Return the layer's geometry type as a GeoJSON-style name (see _geometry.pyx).
cdef str get_geometry_type(void *ogr_layer)
# Return the OGR geometry type code for a string geometry type name.
cdef OGRwkbGeometryType get_geometry_type_code(str geometry_type) except *

View File

@@ -1,129 +0,0 @@
import warnings
from pyogrio._ogr cimport *
from pyogrio._err cimport *
from pyogrio._err import CPLE_BaseError, NullPointerError
from pyogrio.errors import DataLayerError, GeometryError
# Mapping of OGR integer geometry types to GeoJSON type names.
GEOMETRY_TYPES = {
    wkbUnknown: 'Unknown',
    wkbPoint: 'Point',
    wkbLineString: 'LineString',
    wkbPolygon: 'Polygon',
    wkbMultiPoint: 'MultiPoint',
    wkbMultiLineString: 'MultiLineString',
    wkbMultiPolygon: 'MultiPolygon',
    wkbGeometryCollection: 'GeometryCollection',
    wkbNone: None,
    wkbLinearRing: 'LinearRing',
    # WARNING: Measured types are not supported in GEOS and downstream uses
    # these are stripped automatically to their corresponding 2D / 3D types
    # NOTE(review): 'PointM' is inconsistent with the "Measured ..." naming of
    # the other M types below — confirm whether this is intentional.
    wkbPointM: 'PointM',
    wkbLineStringM: 'Measured LineString',
    wkbPolygonM: 'Measured Polygon',
    wkbMultiPointM: 'Measured MultiPoint',
    wkbMultiLineStringM: 'Measured MultiLineString',
    wkbMultiPolygonM: 'Measured MultiPolygon',
    wkbGeometryCollectionM: 'Measured GeometryCollection',
    wkbPointZM: 'Measured 3D Point',
    wkbLineStringZM: 'Measured 3D LineString',
    wkbPolygonZM: 'Measured 3D Polygon',
    wkbMultiPointZM: 'Measured 3D MultiPoint',
    wkbMultiLineStringZM: 'Measured 3D MultiLineString',
    wkbMultiPolygonZM: 'Measured 3D MultiPolygon',
    wkbGeometryCollectionZM: 'Measured 3D GeometryCollection',
    wkbPoint25D: 'Point Z',
    wkbLineString25D: 'LineString Z',
    wkbPolygon25D: 'Polygon Z',
    wkbMultiPoint25D: 'MultiPoint Z',
    wkbMultiLineString25D: 'MultiLineString Z',
    wkbMultiPolygon25D: 'MultiPolygon Z',
    wkbGeometryCollection25D: 'GeometryCollection Z',
}

# Inverse mapping: GeoJSON-style name -> OGR geometry type code.
GEOMETRY_TYPE_CODES = {v:k for k, v in GEOMETRY_TYPES.items()}

# add additional aliases from 2.5D format
GEOMETRY_TYPE_CODES.update({
    '2.5D Point': wkbPoint25D,
    '2.5D LineString': wkbLineString25D,
    '2.5D Polygon': wkbPolygon25D,
    '2.5D MultiPoint': wkbMultiPoint25D,
    '2.5D MultiLineString': wkbMultiLineString25D,
    '2.5D MultiPolygon': wkbMultiPolygon25D,
    '2.5D GeometryCollection': wkbGeometryCollection25D
})

# 2.5D also represented using negative numbers not enumerated above
GEOMETRY_TYPES.update({
    -2147483647: 'Point Z',
    -2147483646: 'LineString Z',
    -2147483645: 'Polygon Z',
    -2147483644: 'MultiPoint Z',
    -2147483643: 'MultiLineString Z',
    -2147483642: 'MultiPolygon Z',
    -2147483641: 'GeometryCollection Z',
})
cdef str get_geometry_type(void *ogr_layer):
    """Get geometry type for layer.

    Parameters
    ----------
    ogr_layer : pointer to open OGR layer

    Returns
    -------
    str
        geometry type
    """
    cdef void *cogr_featuredef = NULL
    cdef OGRwkbGeometryType ogr_type

    # NOTE(review): `cogr_featuredef` is declared above but the assignment
    # below targets `ogr_featuredef` — looks like a typo; confirm intended.
    try:
        ogr_featuredef = exc_wrap_pointer(OGR_L_GetLayerDefn(ogr_layer))
    except NullPointerError:
        raise DataLayerError("Could not get layer definition")

    except CPLE_BaseError as exc:
        raise DataLayerError(str(exc))

    ogr_type = OGR_FD_GetGeomType(ogr_featuredef)

    if ogr_type not in GEOMETRY_TYPES:
        raise GeometryError(f"Geometry type is not supported: {ogr_type}")

    if OGR_GT_HasM(ogr_type):
        original_type = GEOMETRY_TYPES[ogr_type]

        # Downgrade the type to 2D / 3D
        ogr_type = OGR_GT_SetModifier(ogr_type, OGR_GT_HasZ(ogr_type), 0)

        # TODO: review; this might be annoying...
        warnings.warn(
            "Measured (M) geometry types are not supported. "
            f"Original type '{original_type}' "
            f"is converted to '{GEOMETRY_TYPES[ogr_type]}'")

    return GEOMETRY_TYPES[ogr_type]
cdef OGRwkbGeometryType get_geometry_type_code(str geometry_type) except *:
    """Get geometry type code for string geometry type.

    Parameters
    ----------
    geometry_type : str

    Returns
    -------
    int
        geometry type code
    """
    # GEOMETRY_TYPE_CODES includes both GeoJSON-style names and 2.5D aliases
    if geometry_type not in GEOMETRY_TYPE_CODES:
        raise GeometryError(f"Geometry type is not supported: {geometry_type}")

    return GEOMETRY_TYPE_CODES[geometry_type]

File diff suppressed because it is too large Load Diff

View File

@@ -1,388 +0,0 @@
# Contains declarations against GDAL / OGR API
from libc.stdint cimport int64_t, int8_t
from libc.stdio cimport FILE
cdef extern from "cpl_conv.h":
ctypedef unsigned char GByte
void* CPLMalloc(size_t)
void CPLFree(void *ptr)
const char* CPLFindFile(const char *pszClass, const char *filename)
const char* CPLGetConfigOption(const char* key, const char* value)
void CPLSetConfigOption(const char* key, const char* value)
cdef extern from "cpl_error.h" nogil:
ctypedef enum CPLErr:
CE_None
CE_Debug
CE_Warning
CE_Failure
CE_Fatal
void CPLErrorReset()
int CPLGetLastErrorNo()
const char* CPLGetLastErrorMsg()
int CPLGetLastErrorType()
ctypedef void (*CPLErrorHandler)(CPLErr, int, const char*)
void CPLDefaultErrorHandler(CPLErr, int, const char *)
void CPLPushErrorHandler(CPLErrorHandler handler)
void CPLPopErrorHandler()
cdef extern from "cpl_string.h":
char** CSLAddNameValue(char **list, const char *name, const char *value)
char** CSLSetNameValue(char **list, const char *name, const char *value)
void CSLDestroy(char **list)
char** CSLAddString(char **list, const char *string)
int CSLCount(char **list)
cdef extern from "cpl_vsi.h" nogil:
ctypedef FILE VSILFILE
VSILFILE *VSIFileFromMemBuffer(const char *path, void *data,
int data_len, int take_ownership)
int VSIFCloseL(VSILFILE *fp)
int VSIUnlink(const char *path)
cdef extern from "ogr_core.h":
ctypedef enum OGRErr:
OGRERR_NONE # success
OGRERR_NOT_ENOUGH_DATA
OGRERR_NOT_ENOUGH_MEMORY
OGRERR_UNSUPPORTED_GEOMETRY_TYPE
OGRERR_UNSUPPORTED_OPERATION
OGRERR_CORRUPT_DATA
OGRERR_FAILURE
OGRERR_UNSUPPORTED_SRS
OGRERR_INVALID_HANDLE
OGRERR_NON_EXISTING_FEATURE
ctypedef enum OGRwkbGeometryType:
wkbUnknown
wkbPoint
wkbLineString
wkbPolygon
wkbMultiPoint
wkbMultiLineString
wkbMultiPolygon
wkbGeometryCollection
wkbCircularString
wkbCompoundCurve
wkbCurvePolygon
wkbMultiCurve
wkbMultiSurface
wkbCurve
wkbSurface
wkbPolyhedralSurface
wkbTIN
wkbTriangle
wkbNone
wkbLinearRing
wkbCircularStringZ
wkbCompoundCurveZ
wkbCurvePolygonZ
wkbMultiCurveZ
wkbMultiSurfaceZ
wkbCurveZ
wkbSurfaceZ
wkbPolyhedralSurfaceZ
wkbTINZ
wkbTriangleZ
wkbPointM
wkbLineStringM
wkbPolygonM
wkbMultiPointM
wkbMultiLineStringM
wkbMultiPolygonM
wkbGeometryCollectionM
wkbCircularStringM
wkbCompoundCurveM
wkbCurvePolygonM
wkbMultiCurveM
wkbMultiSurfaceM
wkbCurveM
wkbSurfaceM
wkbPolyhedralSurfaceM
wkbTINM
wkbTriangleM
wkbPointZM
wkbLineStringZM
wkbPolygonZM
wkbMultiPointZM
wkbMultiLineStringZM
wkbMultiPolygonZM
wkbGeometryCollectionZM
wkbCircularStringZM
wkbCompoundCurveZM
wkbCurvePolygonZM
wkbMultiCurveZM
wkbMultiSurfaceZM
wkbCurveZM
wkbSurfaceZM
wkbPolyhedralSurfaceZM
wkbTINZM
wkbTriangleZM
wkbPoint25D
wkbLineString25D
wkbPolygon25D
wkbMultiPoint25D
wkbMultiLineString25D
wkbMultiPolygon25D
wkbGeometryCollection25D
ctypedef enum OGRFieldType:
OFTInteger
OFTIntegerList
OFTReal
OFTRealList
OFTString
OFTStringList
OFTWideString
OFTWideStringList
OFTBinary
OFTDate
OFTTime
OFTDateTime
OFTInteger64
OFTInteger64List
OFTMaxType
ctypedef enum OGRFieldSubType:
OFSTNone
OFSTBoolean
OFSTInt16
OFSTFloat32
ctypedef void* OGRDataSourceH
ctypedef void* OGRFeatureDefnH
ctypedef void* OGRFieldDefnH
ctypedef void* OGRFeatureH
ctypedef void* OGRGeometryH
ctypedef void* OGRLayerH
ctypedef void* OGRSFDriverH
ctypedef struct OGREnvelope:
double MinX
double MaxX
double MinY
double MaxY
cdef extern from "ogr_srs_api.h":
ctypedef void* OGRSpatialReferenceH
int OSRAutoIdentifyEPSG(OGRSpatialReferenceH srs)
OGRErr OSRExportToWkt(OGRSpatialReferenceH srs, char **params)
const char* OSRGetAuthorityName(OGRSpatialReferenceH srs, const char *key)
const char* OSRGetAuthorityCode(OGRSpatialReferenceH srs, const char *key)
OGRErr OSRImportFromEPSG(OGRSpatialReferenceH srs, int code)
int OSRSetFromUserInput(OGRSpatialReferenceH srs, const char *pszDef)
void OSRSetPROJSearchPaths(const char *const *paths)
OGRSpatialReferenceH OSRNewSpatialReference(const char *wkt)
void OSRRelease(OGRSpatialReferenceH srs)
cdef extern from "arrow_bridge.h":
struct ArrowSchema:
int64_t n_children
struct ArrowArrayStream:
int (*get_schema)(ArrowArrayStream* stream, ArrowSchema* out)
cdef extern from "ogr_api.h":
int OGRGetDriverCount()
OGRSFDriverH OGRGetDriver(int)
OGRDataSourceH OGR_Dr_Open(OGRSFDriverH driver, const char *path, int bupdate)
const char* OGR_Dr_GetName(OGRSFDriverH driver)
OGRFeatureH OGR_F_Create(OGRFeatureDefnH featuredefn)
void OGR_F_Destroy(OGRFeatureH feature)
int64_t OGR_F_GetFID(OGRFeatureH feature)
OGRGeometryH OGR_F_GetGeometryRef(OGRFeatureH feature)
GByte* OGR_F_GetFieldAsBinary(OGRFeatureH feature, int n, int *s)
int OGR_F_GetFieldAsDateTimeEx(OGRFeatureH feature, int n, int *y, int *m, int *d, int *h, int *m, float *s, int *z)
double OGR_F_GetFieldAsDouble(OGRFeatureH feature, int n)
int OGR_F_GetFieldAsInteger(OGRFeatureH feature, int n)
int64_t OGR_F_GetFieldAsInteger64(OGRFeatureH feature, int n)
const char* OGR_F_GetFieldAsString(OGRFeatureH feature, int n)
int OGR_F_IsFieldSetAndNotNull(OGRFeatureH feature, int n)
void OGR_F_SetFieldDateTime(OGRFeatureH feature, int n, int y, int m, int d, int hh, int mm, int ss, int tz)
void OGR_F_SetFieldDouble(OGRFeatureH feature, int n, double value)
void OGR_F_SetFieldInteger(OGRFeatureH feature, int n, int value)
void OGR_F_SetFieldInteger64(OGRFeatureH feature, int n, int64_t value)
void OGR_F_SetFieldString(OGRFeatureH feature, int n, char *value)
void OGR_F_SetFieldBinary(OGRFeatureH feature, int n, int l, unsigned char *value)
void OGR_F_SetFieldNull(OGRFeatureH feature, int n) # new in GDAL 2.2
void OGR_F_SetFieldDateTimeEx(
OGRFeatureH hFeat,
int iField,
int nYear,
int nMonth,
int nDay,
int nHour,
int nMinute,
float fSecond,
int nTZFlag)
OGRErr OGR_F_SetGeometryDirectly(OGRFeatureH feature, OGRGeometryH geometry)
OGRFeatureDefnH OGR_FD_Create(const char *name)
int OGR_FD_GetFieldCount(OGRFeatureDefnH featuredefn)
OGRFeatureDefnH OGR_FD_GetFieldDefn(OGRFeatureDefnH featuredefn, int n)
OGRwkbGeometryType OGR_FD_GetGeomType(OGRFeatureDefnH featuredefn)
OGRFieldDefnH OGR_Fld_Create(const char *name, OGRFieldType fieldtype)
void OGR_Fld_Destroy(OGRFieldDefnH fielddefn)
const char* OGR_Fld_GetNameRef(OGRFieldDefnH fielddefn)
int OGR_Fld_GetPrecision(OGRFieldDefnH fielddefn)
OGRFieldSubType OGR_Fld_GetSubType(OGRFieldDefnH fielddefn)
int OGR_Fld_GetType(OGRFieldDefnH fielddefn)
int OGR_Fld_GetWidth(OGRFieldDefnH fielddefn)
void OGR_Fld_Set(OGRFieldDefnH fielddefn, const char *name, int fieldtype, int width, int precision, int justification)
void OGR_Fld_SetPrecision(OGRFieldDefnH fielddefn, int n)
void OGR_Fld_SetWidth(OGRFieldDefnH fielddefn, int n)
void OGR_Fld_SetSubType(OGRFieldDefnH fielddefn, OGRFieldSubType subtype)
OGRGeometryH OGR_G_CreateGeometry(int wkbtypecode)
OGRErr OGR_G_CreateFromWkb(const void *bytes, OGRSpatialReferenceH srs, OGRGeometryH *geometry, int nbytes)
void OGR_G_DestroyGeometry(OGRGeometryH geometry)
void OGR_G_ExportToWkb(OGRGeometryH geometry, int endianness, unsigned char *buffer)
void OGR_G_GetEnvelope(OGRGeometryH geometry, OGREnvelope* envelope)
OGRwkbGeometryType OGR_G_GetGeometryType(OGRGeometryH)
OGRGeometryH OGR_G_GetLinearGeometry(OGRGeometryH hGeom, double dfMaxAngleStepSizeDegrees, char **papszOptions)
OGRErr OGR_G_ImportFromWkb(OGRGeometryH geometry, const void *bytes, int nbytes)
int OGR_G_IsMeasured(OGRGeometryH geometry)
void OGR_G_SetMeasured(OGRGeometryH geometry, int isMeasured)
int OGR_G_Is3D(OGRGeometryH geometry)
void OGR_G_Set3D(OGRGeometryH geometry, int is3D)
int OGR_G_WkbSize(OGRGeometryH geometry)
OGRGeometryH OGR_G_ForceToMultiPoint(OGRGeometryH geometry)
OGRGeometryH OGR_G_ForceToMultiLineString(OGRGeometryH geometry)
OGRGeometryH OGR_G_ForceToMultiPolygon(OGRGeometryH geometry)
int OGR_GT_HasM(OGRwkbGeometryType eType)
int OGR_GT_HasZ(OGRwkbGeometryType eType)
int OGR_GT_IsNonLinear(OGRwkbGeometryType eType)
OGRwkbGeometryType OGR_GT_SetModifier(OGRwkbGeometryType eType, int setZ, int setM)
OGRErr OGR_L_CreateFeature(OGRLayerH layer, OGRFeatureH feature)
OGRErr OGR_L_CreateField(OGRLayerH layer, OGRFieldDefnH fielddefn, int flexible)
const char* OGR_L_GetName(OGRLayerH layer)
const char* OGR_L_GetFIDColumn(OGRLayerH layer)
const char* OGR_L_GetGeometryColumn(OGRLayerH layer)
OGRErr OGR_L_GetExtent(OGRLayerH layer, OGREnvelope *psExtent, int bForce)
OGRSpatialReferenceH OGR_L_GetSpatialRef(OGRLayerH layer)
int OGR_L_TestCapability(OGRLayerH layer, const char *name)
OGRFeatureDefnH OGR_L_GetLayerDefn(OGRLayerH layer)
OGRFeatureH OGR_L_GetNextFeature(OGRLayerH layer)
OGRFeatureH OGR_L_GetFeature(OGRLayerH layer, int nFeatureId)
void OGR_L_ResetReading(OGRLayerH layer)
OGRErr OGR_L_SetAttributeFilter(OGRLayerH hLayer, const char* pszQuery)
OGRErr OGR_L_SetNextByIndex(OGRLayerH layer, int nIndex)
int OGR_L_GetFeatureCount(OGRLayerH layer, int m)
void OGR_L_SetSpatialFilterRect(OGRLayerH layer, double xmin, double ymin, double xmax, double ymax)
void OGR_L_SetSpatialFilter(OGRLayerH layer, OGRGeometryH geometry)
OGRErr OGR_L_SetIgnoredFields(OGRLayerH layer, const char** fields)
void OGRSetNonLinearGeometriesEnabledFlag(int bFlag)
int OGRGetNonLinearGeometriesEnabledFlag()
int OGRReleaseDataSource(OGRDataSourceH ds)
const char* OLCStringsAsUTF8
const char* OLCRandomRead
const char* OLCFastSetNextByIndex
const char* OLCFastSpatialFilter
const char* OLCFastFeatureCount
const char* OLCFastGetExtent
const char* OLCTransactions
IF CTE_GDAL_VERSION >= (3, 6, 0):
cdef extern from "ogr_api.h":
int8_t OGR_L_GetArrowStream(OGRLayerH hLayer, ArrowArrayStream *out_stream, char** papszOptions)
cdef extern from "gdal.h":
ctypedef enum GDALDataType:
GDT_Unknown
GDT_Byte
GDT_UInt16
GDT_Int16
GDT_UInt32
GDT_Int32
GDT_Float32
GDT_Float64
GDT_CInt16
GDT_CInt32
GDT_CFloat32
GDT_CFloat64
GDT_TypeCount
int GDAL_OF_UPDATE
int GDAL_OF_READONLY
int GDAL_OF_VECTOR
int GDAL_OF_VERBOSE_ERROR
ctypedef void* GDALDatasetH
ctypedef void* GDALDriverH
ctypedef void * GDALMajorObjectH
void GDALAllRegister()
GDALDatasetH GDALCreate(OGRSFDriverH driver,
const char * pszFilename,
int nXSize,
int nYSize,
int nBands,
GDALDataType eBandType,
char ** papszOptions)
OGRLayerH GDALDatasetCreateLayer(GDALDatasetH ds,
const char * pszName,
OGRSpatialReferenceH hSpatialRef,
int eType,
char ** papszOptions)
int GDALDatasetDeleteLayer(GDALDatasetH hDS, int iLayer)
GDALDriverH GDALGetDatasetDriver(GDALDatasetH ds)
GDALDriverH GDALGetDriverByName(const char * pszName)
GDALDatasetH GDALOpenEx(const char * pszFilename,
unsigned int nOpenFlags,
const char *const *papszAllowedDrivers,
const char *const *papszOpenOptions,
const char *const *papszSiblingFiles)
void GDALClose(GDALDatasetH ds)
int GDALDatasetGetLayerCount(GDALDatasetH ds)
OGRLayerH GDALDatasetGetLayer(GDALDatasetH ds, int iLayer)
OGRLayerH GDALDatasetGetLayerByName(GDALDatasetH ds, char * pszName)
OGRLayerH GDALDatasetExecuteSQL(
GDALDatasetH ds,
const char* pszStatement,
OGRGeometryH hSpatialFilter,
const char* pszDialect)
void GDALDatasetReleaseResultSet(GDALDatasetH, OGRLayerH)
OGRErr GDALDatasetStartTransaction(GDALDatasetH ds, int bForce)
OGRErr GDALDatasetCommitTransaction(GDALDatasetH ds)
OGRErr GDALDatasetRollbackTransaction(GDALDatasetH ds)
char** GDALGetMetadata(GDALMajorObjectH obj, const char *pszDomain)
const char* GDALGetMetadataItem(GDALMajorObjectH obj, const char *pszName, const char *pszDomain)
OGRErr GDALSetMetadata(GDALMajorObjectH obj, char **metadata, const char *pszDomain)
const char* GDALVersionInfo(const char *pszRequest)
cdef get_string(const char *c_str, str encoding=*)

View File

@@ -1,362 +0,0 @@
import os
import sys
from uuid import uuid4
import warnings
from pyogrio._err cimport exc_wrap_int, exc_wrap_ogrerr, exc_wrap_pointer
from pyogrio._err import CPLE_BaseError, NullPointerError
from pyogrio.errors import DataSourceError
cdef get_string(const char *c_str, str encoding="UTF-8"):
    """Get Python string from a char *

    IMPORTANT: the char * must still be freed by the caller.

    Parameters
    ----------
    c_str : char *
    encoding : str, optional (default: UTF-8)

    Returns
    -------
    Python string
    """
    cdef bytes py_str

    # copies the C string into a Python bytes object, then decodes it
    py_str = c_str
    return py_str.decode(encoding)
def get_gdal_version():
    """Convert GDAL version number into tuple of (major, minor, revision)"""
    # VERSION_NUM is encoded as MMNNRRBB (major*1000000 + minor*10000 +
    # revision*100 + build); peel off each component in turn.
    version = int(GDALVersionInfo("VERSION_NUM"))
    major, remainder = divmod(version, 1000000)
    minor, remainder = divmod(remainder, 10000)
    revision = remainder // 100
    return (major, minor, revision)
def get_gdal_version_string():
    """Return the GDAL release name reported by GDALVersionInfo as a str."""
    cdef const char* version = GDALVersionInfo("RELEASE_NAME")
    return get_string(version)
# OGRGetGEOSVersion is only available in GDAL >= 3.4; declare it at
# compile time so older GDAL builds still compile.
IF CTE_GDAL_VERSION >= (3, 4, 0):

    cdef extern from "ogr_api.h":
        bint OGRGetGEOSVersion(int *pnMajor, int *pnMinor, int *pnPatch)


def get_gdal_geos_version():
    """Return the GEOS version linked into GDAL as (major, minor, revision),
    or None if unavailable (GDAL built without GEOS, or GDAL < 3.4)."""
    cdef int major, minor, revision

    IF CTE_GDAL_VERSION >= (3, 4, 0):
        # returns FALSE when GDAL was built without GEOS support
        if not OGRGetGEOSVersion(&major, &minor, &revision):
            return None
        return (major, minor, revision)
    ELSE:
        return None
def set_gdal_config_options(dict options):
    """Set GDAL configuration options.

    Parameters
    ----------
    options : dict
        Mapping of option name to value. True / False are normalized to
        'ON' / 'OFF'; None clears the option; other values are stringified.
    """
    for name, value in options.items():
        name_b = name.encode('utf-8')
        name_c = name_b

        # None is a special case; this is used to clear the previous value
        if value is None:
            CPLSetConfigOption(<const char*>name_c, NULL)
            continue

        # normalize bool to ON/OFF
        if isinstance(value, bool):
            value_b = b'ON' if value else b'OFF'
        else:
            value_b = str(value).encode('utf-8')

        value_c = value_b
        CPLSetConfigOption(<const char*>name_c, <const char*>value_c)
def get_gdal_config_option(str name):
    """Get the value of a GDAL configuration option.

    Returns None when unset; all-digit values are returned as int and
    'ON' / 'OFF' as True / False, mirroring set_gdal_config_options.
    """
    name_b = name.encode('utf-8')
    name_c = name_b
    value = CPLGetConfigOption(<const char*>name_c, NULL)

    if not value:
        return None

    if value.isdigit():
        return int(value)

    if value == b'ON':
        return True
    if value == b'OFF':
        return False

    str_value = get_string(value)
    return str_value
def ogr_driver_supports_write(driver):
    """Return True if the named OGR driver advertises create (write) support."""
    # DCAP_CREATE metadata is 'YES' for drivers that can create datasets
    return _get_driver_metadata_item(driver, "DCAP_CREATE") == 'YES'
def ogr_list_drivers():
    """Return a dict mapping each registered OGR driver name to "rw" or "r"."""
    cdef OGRSFDriverH driver = NULL
    cdef int i
    cdef char *name_c

    drivers = dict()
    for i in range(OGRGetDriverCount()):
        driver = OGRGetDriver(i)
        name_c = <char *>OGR_Dr_GetName(driver)
        name = get_string(name_c)
        # drivers that advertise create capability are read-write
        if ogr_driver_supports_write(name):
            drivers[name] = "rw"
        else:
            drivers[name] = "r"

    return drivers
def buffer_to_virtual_file(bytesbuf, ext=''):
    """Maps a bytes buffer to a virtual file.

    `ext` is empty or begins with a period and contains at most one period.

    This (and remove_virtual_file) is originally copied from the Fiona project
    (https://github.com/Toblerity/Fiona/blob/c388e9adcf9d33e3bb04bf92b2ff210bbce452d9/fiona/ogrext.pyx#L1863-L1879)
    """
    # random name avoids collisions between concurrently mapped buffers
    vsi_filename = f"/vsimem/{uuid4().hex + ext}"

    # final 0 argument: GDAL does not take ownership of the buffer
    vsi_handle = VSIFileFromMemBuffer(vsi_filename.encode("UTF-8"), <unsigned char *>bytesbuf, len(bytesbuf), 0)
    if vsi_handle == NULL:
        raise OSError('failed to map buffer to file')
    if VSIFCloseL(vsi_handle) != 0:
        raise OSError('failed to close mapped file handle')

    return vsi_filename
def remove_virtual_file(vsi_filename):
    """Unlink a /vsimem/ file created by buffer_to_virtual_file; returns the
    VSIUnlink result code."""
    return VSIUnlink(vsi_filename.encode("UTF-8"))
cdef void set_proj_search_path(str path):
    """Set PROJ library data file search path for use in GDAL."""
    cdef char **paths = NULL
    cdef const char *path_c = NULL
    path_b = path.encode("utf-8")
    path_c = path_b
    # build a single-entry CSL string list for the GDAL API
    paths = CSLAddString(paths, path_c)
    OSRSetPROJSearchPaths(<const char *const *>paths)
    # NOTE(review): `paths` is never freed with CSLDestroy — a small one-time
    # leak; confirm whether GDAL copies the list before adding cleanup.
def has_gdal_data():
    """Verify that GDAL library data files are correctly found.

    Adapted from Fiona (_env.pyx).
    """
    # header.dxf is used as a sentinel file known to exist in GDAL's data dir
    if CPLFindFile("gdal", "header.dxf") != NULL:
        return True

    return False
def get_gdal_data_path():
    """
    Get the path to the directory GDAL uses to read data files.

    Returns
    -------
    str or None
        Path of the GDAL data directory (including trailing separator), or
        None if the data files could not be located.
    """
    cdef const char *path_c = CPLFindFile("gdal", "header.dxf")
    if path_c != NULL:
        path = get_string(path_c)
        # Strip the probe filename to leave only the directory. The previous
        # implementation used str.rstrip("header.dxf"), which removes any
        # trailing run of those *characters* — not the suffix — and could
        # corrupt directory names ending in e.g. 'd', 'x', or '.'.
        if path.endswith("header.dxf"):
            path = path[: -len("header.dxf")]
        return path
    return None
def has_proj_data():
    """Verify that PROJ library data files are correctly found.

    Returns
    -------
    bool
        True if a test spatial reference object could be created, which verifies
        that data files are correctly loaded.

    Adapted from Fiona (_env.pyx).
    """
    cdef OGRSpatialReferenceH srs = OSRNewSpatialReference(NULL)

    try:
        # importing EPSG:4326 requires PROJ's data files to be resolvable
        exc_wrap_ogrerr(exc_wrap_int(OSRImportFromEPSG(srs, 4326)))
    except CPLE_BaseError:
        return False
    else:
        return True
    finally:
        # release the SRS even when returning from except/else above
        if srs != NULL:
            OSRRelease(srs)
def init_gdal_data():
    """Set GDAL data search directories in the following precedence:
    - wheel copy of gdal_data
    - default detection by GDAL, including GDAL_DATA (detected automatically by GDAL)
    - other well-known paths under sys.prefix

    Adapted from Fiona (env.py, _env.pyx).
    """
    # wheels are packaged to include GDAL data files at pyogrio/gdal_data
    wheel_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "gdal_data"))
    if os.path.exists(wheel_path):
        set_gdal_config_options({"GDAL_DATA": wheel_path})
        if not has_gdal_data():
            raise ValueError("Could not correctly detect GDAL data files installed by pyogrio wheel")
        return

    # GDAL correctly found data files from GDAL_DATA or compiled-in paths
    if has_gdal_data():
        return

    # fall back to a conventional installation location under sys.prefix
    wk_path = os.path.join(sys.prefix, 'share', 'gdal')
    if os.path.exists(wk_path):
        set_gdal_config_options({"GDAL_DATA": wk_path})
        if not has_gdal_data():
            raise ValueError(f"Found GDAL data directory at {wk_path} but it does not appear to correctly contain GDAL data files")
        return

    warnings.warn("Could not detect GDAL data files. Set GDAL_DATA environment variable to the correct path.", RuntimeWarning)
def init_proj_data():
    """Set Proj search directories in the following precedence:
    - wheel copy of proj_data
    - default detection by PROJ, including PROJ_LIB (detected automatically by PROJ)
    - search other well-known paths under sys.prefix

    Adapted from Fiona (env.py, _env.pyx).
    """
    # wheels are packaged to include PROJ data files at pyogrio/proj_data
    wheel_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "proj_data"))
    if os.path.exists(wheel_path):
        set_proj_search_path(wheel_path)
        # verify that this now resolves
        if not has_proj_data():
            raise ValueError("Could not correctly detect PROJ data files installed by pyogrio wheel")
        return

    # PROJ correctly found data files from PROJ_LIB or compiled-in paths
    if has_proj_data():
        return

    # fall back to a conventional installation location under sys.prefix
    wk_path = os.path.join(sys.prefix, 'share', 'proj')
    if os.path.exists(wk_path):
        set_proj_search_path(wk_path)
        # verify that this now resolves
        if not has_proj_data():
            raise ValueError(f"Found PROJ data directory at {wk_path} but it does not appear to correctly contain PROJ data files")
        return

    warnings.warn("Could not detect PROJ data files. Set PROJ_LIB environment variable to the correct path.", RuntimeWarning)
def _register_drivers():
    # Register all drivers
    GDALAllRegister()
def _get_driver_metadata_item(driver, metadata_item):
    """
    Query driver metadata items.

    Parameters
    ----------
    driver : str
        Driver to query
    metadata_item : str
        Metadata item to query

    Returns
    -------
    str or None
        Metadata item
    """
    cdef const char* metadata_c = NULL
    cdef void *cogr_driver = NULL

    try:
        cogr_driver = exc_wrap_pointer(GDALGetDriverByName(driver.encode('UTF-8')))
    except NullPointerError:
        raise DataSourceError(
            f"Could not obtain driver: {driver} (check that it was installed "
            "correctly into GDAL)"
        )
    except CPLE_BaseError as exc:
        raise DataSourceError(str(exc))

    metadata_c = GDALGetMetadataItem(cogr_driver, metadata_item.encode('UTF-8'), NULL)

    metadata = None
    if metadata_c != NULL:
        # convert the C string to a Python str; empty strings become None
        metadata = metadata_c
        metadata = metadata.decode('UTF-8')
        if len(metadata) == 0:
            metadata = None

    return metadata
def _get_drivers_for_path(path):
    """Return names of write-capable OGR drivers that match the path.

    A driver matches if the path's extension appears in the driver's
    DMD_EXTENSIONS metadata, or if the path starts with the driver's
    DMD_CONNECTION_PREFIX.
    """
    cdef OGRSFDriverH driver = NULL
    cdef int i
    cdef char *name_c
    path = str(path).lower()
    # extract the extension (without leading dot), if there is one
    root_ext = os.path.splitext(path)
    if len(root_ext) == 2 and len(root_ext[1]) > 1:
        ext = root_ext[1][1:]
    else:
        ext = None
    # allow specific drivers to have a .zip extension to match GDAL behavior
    if ext == 'zip':
        for compound in ('shp.zip', 'gpkg.zip'):
            if path.endswith('.' + compound):
                ext = compound
                break
    matches = []
    for i in range(OGRGetDriverCount()):
        driver = OGRGetDriver(i)
        name_c = <char *>OGR_Dr_GetName(driver)
        name = get_string(name_c)
        if not ogr_driver_supports_write(name):
            continue
        # DMD_EXTENSIONS is a space-delimited list of supported extensions
        # for the driver
        supported = _get_driver_metadata_item(name, "DMD_EXTENSIONS")
        if ext is not None and supported is not None and ext in supported.lower().split(' '):
            matches.append(name)
        else:
            # fall back to matching on a connection prefix (e.g. "PG:")
            prefix = _get_driver_metadata_item(name, "DMD_CONNECTION_PREFIX")
            if prefix is not None and path.startswith(prefix.lower()):
                matches.append(name)
    return matches

View File

@@ -8,11 +8,11 @@ import json
version_json = '''
{
"date": "2023-10-30T11:39:03-0700",
"date": "2024-09-28T11:22:57-0700",
"dirty": false,
"error": null,
"full-revisionid": "71acde57ef674c8622d17b29663ff4349b1fee6e",
"version": "0.7.2"
"full-revisionid": "eb8e7889224155ffa0f779360db29f07f370eef1",
"version": "0.10.0"
}
''' # END VERSION_JSON

View File

@@ -1,115 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
// This file is an extract https://github.com/apache/arrow/blob/master/cpp/src/arrow/c/abi.h
// commit 9cbb8a1a626ee301cfe85905b6c18c5d880e176b (2022-06-14)
// WARNING: DO NOT MODIFY the content as it would break interoperability !
#pragma once
#include <stdint.h>
#ifdef __cplusplus
extern "C" {
#endif
#ifndef ARROW_C_DATA_INTERFACE
#define ARROW_C_DATA_INTERFACE
#define ARROW_FLAG_DICTIONARY_ORDERED 1
#define ARROW_FLAG_NULLABLE 2
#define ARROW_FLAG_MAP_KEYS_SORTED 4
struct ArrowSchema {
// Array type description
const char* format;
const char* name;
const char* metadata;
int64_t flags;
int64_t n_children;
struct ArrowSchema** children;
struct ArrowSchema* dictionary;
// Release callback
void (*release)(struct ArrowSchema*);
// Opaque producer-specific data
void* private_data;
};
struct ArrowArray {
// Array data description
int64_t length;
int64_t null_count;
int64_t offset;
int64_t n_buffers;
int64_t n_children;
const void** buffers;
struct ArrowArray** children;
struct ArrowArray* dictionary;
// Release callback
void (*release)(struct ArrowArray*);
// Opaque producer-specific data
void* private_data;
};
#endif // ARROW_C_DATA_INTERFACE
#ifndef ARROW_C_STREAM_INTERFACE
#define ARROW_C_STREAM_INTERFACE
struct ArrowArrayStream {
// Callback to get the stream type
// (will be the same for all arrays in the stream).
//
// Return value: 0 if successful, an `errno`-compatible error code otherwise.
//
// If successful, the ArrowSchema must be released independently from the stream.
int (*get_schema)(struct ArrowArrayStream*, struct ArrowSchema* out);
// Callback to get the next array
// (if no error and the array is released, the stream has ended)
//
// Return value: 0 if successful, an `errno`-compatible error code otherwise.
//
// If successful, the ArrowArray must be released independently from the stream.
int (*get_next)(struct ArrowArrayStream*, struct ArrowArray* out);
// Callback to get optional detailed error information.
// This must only be called if the last stream operation failed
// with a non-0 return code.
//
// Return value: pointer to a null-terminated character array describing
// the last error, or NULL if no description is available.
//
// The returned pointer is only valid until the next operation on this stream
// (including release).
const char* (*get_last_error)(struct ArrowArrayStream*);
// Release callback: release the stream's own resources.
// Note that arrays returned by `get_next` must be individually released.
void (*release)(struct ArrowArrayStream*);
// Opaque producer-specific data
void* private_data;
};
#endif // ARROW_C_STREAM_INTERFACE
#ifdef __cplusplus
}
#endif

View File

@@ -1,24 +1,36 @@
from pyogrio._env import GDALEnv
from pyogrio.util import get_vsi_path, _preprocess_options_key_value, _mask_to_wkb
"""Core functions to interact with OGR data sources."""
from pathlib import Path
from typing import Optional, Union
from pyogrio._env import GDALEnv
from pyogrio.util import (
_mask_to_wkb,
_preprocess_options_key_value,
get_vsi_path_or_buffer,
)
with GDALEnv():
from pyogrio._ogr import (
get_gdal_version,
get_gdal_version_string,
get_gdal_geos_version,
ogr_list_drivers,
set_gdal_config_options as _set_gdal_config_options,
get_gdal_config_option as _get_gdal_config_option,
get_gdal_data_path as _get_gdal_data_path,
init_gdal_data as _init_gdal_data,
init_proj_data as _init_proj_data,
remove_virtual_file,
_register_drivers,
_get_drivers_for_path,
)
from pyogrio._err import _register_error_handler
from pyogrio._io import ogr_list_layers, ogr_read_bounds, ogr_read_info
from pyogrio._ogr import (
_get_drivers_for_path,
_register_drivers,
get_gdal_config_option as _get_gdal_config_option,
get_gdal_data_path as _get_gdal_data_path,
get_gdal_geos_version,
get_gdal_version,
get_gdal_version_string,
init_gdal_data as _init_gdal_data,
init_proj_data as _init_proj_data,
ogr_list_drivers,
set_gdal_config_options as _set_gdal_config_options,
)
from pyogrio._vsi import (
ogr_vsi_listtree,
ogr_vsi_rmtree,
ogr_vsi_unlink,
)
_init_gdal_data()
_init_proj_data()
@@ -45,8 +57,8 @@ def list_drivers(read=False, write=False):
dict
Mapping of driver name to file mode capabilities: ``"r"``: read, ``"w"``: write.
Drivers that are available but with unknown support are marked with ``"?"``
"""
"""
drivers = ogr_list_drivers()
if read:
@@ -59,8 +71,9 @@ def list_drivers(read=False, write=False):
def detect_write_driver(path):
"""Attempt to infer the driver for a path by extension or prefix. Only
drivers that support write capabilities will be detected.
"""Attempt to infer the driver for a path by extension or prefix.
Only drivers that support write capabilities will be detected.
If the path cannot be resolved to a single driver, a ValueError will be
raised.
@@ -68,11 +81,13 @@ def detect_write_driver(path):
Parameters
----------
path : str
data source path
Returns
-------
str
name of the driver, if detected
"""
# try to infer driver from path
drivers = _get_drivers_for_path(path)
@@ -102,22 +117,17 @@ def list_layers(path_or_buffer, /):
Parameters
----------
path : str or pathlib.Path
path_or_buffer : str, pathlib.Path, bytes, or file-like
A dataset path or URI, raw buffer, or file-like object with a read method.
Returns
-------
ndarray shape (2, n)
array of pairs of [<layer name>, <layer geometry type>]
Note: geometry is `None` for nonspatial layers.
"""
path, buffer = get_vsi_path(path_or_buffer)
try:
result = ogr_list_layers(path)
finally:
if buffer is not None:
remove_virtual_file(path)
return result
"""
return ogr_list_layers(get_vsi_path_or_buffer(path_or_buffer))
def read_bounds(
@@ -138,8 +148,8 @@ def read_bounds(
Parameters
----------
path : pathlib.Path or str
data source path
path_or_buffer : str, pathlib.Path, bytes, or file-like
A dataset path or URI, raw buffer, or file-like object with a read method.
layer : int or str, optional (default: first layer)
If an integer is provided, it corresponds to the index of the layer
with the data source. If a string is provided, it must match the name
@@ -176,23 +186,17 @@ def read_bounds(
fids are global IDs read from the FID field of the dataset
bounds are ndarray of shape(4, n) containing ``xmin``, ``ymin``, ``xmax``,
``ymax``
"""
path, buffer = get_vsi_path(path_or_buffer)
try:
result = ogr_read_bounds(
path,
layer=layer,
skip_features=skip_features,
max_features=max_features or 0,
where=where,
bbox=bbox,
mask=_mask_to_wkb(mask),
)
finally:
if buffer is not None:
remove_virtual_file(path)
return result
"""
return ogr_read_bounds(
get_vsi_path_or_buffer(path_or_buffer),
layer=layer,
skip_features=skip_features,
max_features=max_features or 0,
where=where,
bbox=bbox,
mask=_mask_to_wkb(mask),
)
def read_info(
@@ -217,9 +221,22 @@ def read_info(
driver or if the data source is nonspatial. You can force it to be calculated using
the ``force_total_bounds`` parameter.
``fid_column`` is the name of the FID field in the data source, if the FID is
physically stored (e.g. in GPKG). If the FID is just a sequence, ``fid_column``
will be "" (e.g. ESRI Shapefile).
``geometry_name`` is the name of the field where the main geometry is stored in the
data source, if the field name can be customized (e.g. in GPKG). If no custom
name is supported, ``geometry_name`` will be "" (e.g. ESRI Shapefile).
``encoding`` will be ``UTF-8`` if either the native encoding is likely to be
``UTF-8`` or GDAL can automatically convert from the detected native encoding
to ``UTF-8``.
Parameters
----------
path : str or pathlib.Path
path_or_buffer : str, pathlib.Path, bytes, or file-like
A dataset path or URI, raw buffer, or file-like object with a read method.
layer : [type], optional
Name or index of layer in data source. Reads the first layer by default.
encoding : [type], optional (default: None)
@@ -240,11 +257,14 @@ def read_info(
A dictionary with the following keys::
{
"layer_name": "<layer name>",
"crs": "<crs>",
"fields": <ndarray of field names>,
"dtypes": <ndarray of field dtypes>,
"encoding": "<encoding>",
"geometry": "<geometry type>",
"fid_column": "<fid column name or "">",
"geometry_name": "<geometry column name or "">",
"geometry_type": "<geometry type>",
"features": <feature count or -1>,
"total_bounds": <tuple with total bounds or None>,
"driver": "<driver>",
@@ -252,24 +272,18 @@ def read_info(
"dataset_metadata": "<dict of dataset metadata or None>"
"layer_metadata": "<dict of layer metadata or None>"
}
"""
path, buffer = get_vsi_path(path_or_buffer)
"""
dataset_kwargs = _preprocess_options_key_value(kwargs) if kwargs else {}
try:
result = ogr_read_info(
path,
layer=layer,
encoding=encoding,
force_feature_count=force_feature_count,
force_total_bounds=force_total_bounds,
dataset_kwargs=dataset_kwargs,
)
finally:
if buffer is not None:
remove_virtual_file(path)
return result
return ogr_read_info(
get_vsi_path_or_buffer(path_or_buffer),
layer=layer,
encoding=encoding,
force_feature_count=force_feature_count,
force_total_bounds=force_total_bounds,
dataset_kwargs=dataset_kwargs,
)
def set_gdal_config_options(options):
@@ -289,8 +303,8 @@ def set_gdal_config_options(options):
configuration options. ``True`` / ``False`` are normalized to ``'ON'``
/ ``'OFF'``. A value of ``None`` for a config option can be used to clear out a
previously set value.
"""
"""
_set_gdal_config_options(options)
@@ -306,8 +320,8 @@ def get_gdal_config_option(name):
-------
value of the option or None if not set
``'ON'`` / ``'OFF'`` are normalized to ``True`` / ``False``.
"""
"""
return _get_gdal_config_option(name)
@@ -317,5 +331,56 @@ def get_gdal_data_path():
Returns
-------
str, or None if data directory was not found
"""
return _get_gdal_data_path()
def vsi_listtree(path: Union[str, Path], pattern: Optional[str] = None):
"""Recursively list the contents of a VSI directory.
An fnmatch pattern can be specified to filter the directories/files
returned.
Parameters
----------
path : str or pathlib.Path
Path to the VSI directory to be listed.
pattern : str, optional
Pattern to filter results, in fnmatch format.
"""
if isinstance(path, Path):
path = path.as_posix()
return ogr_vsi_listtree(path, pattern=pattern)
def vsi_rmtree(path: Union[str, Path]):
"""Recursively remove VSI directory.
Parameters
----------
path : str or pathlib.Path
path to the VSI directory to be removed.
"""
if isinstance(path, Path):
path = path.as_posix()
ogr_vsi_rmtree(path)
def vsi_unlink(path: Union[str, Path]):
"""Remove a VSI file.
Parameters
----------
path : str or pathlib.Path
path to vsimem file to be removed
"""
if isinstance(path, Path):
path = path.as_posix()
ogr_vsi_unlink(path)

View File

@@ -1,32 +1,25 @@
class DataSourceError(RuntimeError):
"""Errors relating to opening or closing an OGRDataSource (with >= 1 layers)"""
"""Custom errors."""
pass
class DataSourceError(RuntimeError):
"""Errors relating to opening or closing an OGRDataSource (with >= 1 layers)."""
class DataLayerError(RuntimeError):
"""Errors relating to working with a single OGRLayer"""
pass
"""Errors relating to working with a single OGRLayer."""
class CRSError(DataLayerError):
"""Errors relating to getting or setting CRS values"""
pass
"""Errors relating to getting or setting CRS values."""
class FeatureError(DataLayerError):
"""Errors related to reading or writing a feature"""
pass
"""Errors related to reading or writing a feature."""
class GeometryError(DataLayerError):
"""Errors relating to getting or setting a geometry field"""
pass
"""Errors relating to getting or setting a geometry field."""
class FieldError(DataLayerError):
"""Errors relating to getting or setting a non-geometry field"""
"""Errors relating to getting or setting a non-geometry field."""

View File

@@ -8,12 +8,12 @@ set(CMAKE_IMPORT_FILE_VERSION 1)
# Import target "GDAL::GDAL" for configuration "Release"
set_property(TARGET GDAL::GDAL APPEND PROPERTY IMPORTED_CONFIGURATIONS RELEASE)
set_target_properties(GDAL::GDAL PROPERTIES
IMPORTED_LOCATION_RELEASE "${_IMPORT_PREFIX}/lib/libgdal.so.33.3.7.2"
IMPORTED_SONAME_RELEASE "libgdal.so.33"
IMPORTED_LOCATION_RELEASE "${_IMPORT_PREFIX}/lib/libgdal.so.35.3.9.1"
IMPORTED_SONAME_RELEASE "libgdal.so.35"
)
list(APPEND _cmake_import_check_targets GDAL::GDAL )
list(APPEND _cmake_import_check_files_for_GDAL::GDAL "${_IMPORT_PREFIX}/lib/libgdal.so.33.3.7.2" )
list(APPEND _cmake_import_check_files_for_GDAL::GDAL "${_IMPORT_PREFIX}/lib/libgdal.so.35.3.9.1" )
# Commands beyond this point should not need to know the version.
set(CMAKE_IMPORT_FILE_VERSION)

View File

@@ -7,7 +7,7 @@ if(CMAKE_VERSION VERSION_LESS "2.8.3")
message(FATAL_ERROR "CMake >= 2.8.3 required")
endif()
cmake_policy(PUSH)
cmake_policy(VERSION 2.8.3...3.25)
cmake_policy(VERSION 2.8.3...3.28)
#----------------------------------------------------------------
# Generated CMake target import file.
#----------------------------------------------------------------
@@ -74,9 +74,12 @@ set(_IMPORT_PREFIX)
# Loop over all imported files and verify that they actually exist
foreach(_cmake_target IN LISTS _cmake_import_check_targets)
foreach(_cmake_file IN LISTS "_cmake_import_check_files_for_${_cmake_target}")
if(NOT EXISTS "${_cmake_file}")
message(FATAL_ERROR "The imported target \"${_cmake_target}\" references the file
if(CMAKE_VERSION VERSION_LESS "3.28"
OR NOT DEFINED _cmake_import_check_xcframework_for_${_cmake_target}
OR NOT IS_DIRECTORY "${_cmake_import_check_xcframework_for_${_cmake_target}}")
foreach(_cmake_file IN LISTS "_cmake_import_check_files_for_${_cmake_target}")
if(NOT EXISTS "${_cmake_file}")
message(FATAL_ERROR "The imported target \"${_cmake_target}\" references the file
\"${_cmake_file}\"
but this file does not exist. Possible reasons include:
* The file was deleted, renamed, or moved to another location.
@@ -85,8 +88,9 @@ but this file does not exist. Possible reasons include:
\"${CMAKE_CURRENT_LIST_FILE}\"
but not all the files it references.
")
endif()
endforeach()
endif()
endforeach()
endif()
unset(_cmake_file)
unset("_cmake_import_check_files_for_${_cmake_target}")
endforeach()

View File

@@ -14,7 +14,6 @@ list(APPEND CMAKE_PROGRAM_PATH "${vcpkg_host_prefix}/tools/pkgconf")
include("${CMAKE_CURRENT_LIST_DIR}/DefineFindPackage2.cmake")
include("${CMAKE_CURRENT_LIST_DIR}/GdalFindModulePath.cmake")
find_dependency(Threads)
find_dependency(PROJ 9 CONFIG)
if(DEFINED _gdal_module_path_backup)
set(CMAKE_MODULE_PATH "${_gdal_module_path_backup}")

View File

@@ -10,13 +10,13 @@
# The variable CVF_VERSION must be set before calling configure_file().
set(PACKAGE_VERSION "3.7.2")
set(PACKAGE_VERSION "3.9.1")
if(PACKAGE_VERSION VERSION_LESS PACKAGE_FIND_VERSION)
set(PACKAGE_VERSION_COMPATIBLE FALSE)
else()
if("3.7.2" MATCHES "^([0-9]+)\\.([0-9]+)")
if("3.9.1" MATCHES "^([0-9]+)\\.([0-9]+)")
set(CVF_VERSION_MAJOR "${CMAKE_MATCH_1}")
set(CVF_VERSION_MINOR "${CMAKE_MATCH_2}")
@@ -27,7 +27,7 @@ else()
string(REGEX REPLACE "^0+" "" CVF_VERSION_MINOR "${CVF_VERSION_MINOR}")
endif()
else()
set(CVF_VERSION_MAJOR "3.7.2")
set(CVF_VERSION_MAJOR "3.9.1")
set(CVF_VERSION_MINOR "")
endif()

View File

@@ -1,201 +0,0 @@
<?xml version="1.0"?>
<gmi:MI_Metadata xmlns:gmi="http://www.isotc211.org/2005/gmi" xmlns:gmd="http://www.isotc211.org/2005/gmd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:gml="http://www.opengis.net/gml/3.2" xmlns:gco="http://www.isotc211.org/2005/gco" xmlns:bag="http://www.opennavsurf.org/schema/bag">
<gmd:language>
<gmd:LanguageCode codeList="http://www.loc.gov/standards/iso639-2/" codeListValue="eng">eng</gmd:LanguageCode>
</gmd:language>
<gmd:contact>
<gmd:CI_ResponsibleParty>
<gmd:individualName>
<gco:CharacterString>${INDIVIDUAL_NAME:unknown}</gco:CharacterString>
</gmd:individualName>
<gmd:organisationName>
<gco:CharacterString>${ORGANISATION_NAME:unknown}</gco:CharacterString>
</gmd:organisationName>
<gmd:positionName>
<gco:CharacterString>${POSITION_NAME:unknown}</gco:CharacterString>
</gmd:positionName>
<gmd:role>
<gmd:CI_RoleCode codeList="http://www.isotc211.org/2005/resources/Codelist/gmxCodelists.xml#CI_RoleCode" codeListValue="${CONTACT_ROLE:author}">${CONTACT_ROLE:author}</gmd:CI_RoleCode>
</gmd:role>
</gmd:CI_ResponsibleParty>
</gmd:contact>
<gmd:dateStamp>
<gco:Date>${DATE}</gco:Date>
</gmd:dateStamp>
<gmd:metadataStandardName>
<gco:CharacterString>${METADATA_STANDARD_NAME:ISO 19139}</gco:CharacterString>
</gmd:metadataStandardName>
<gmd:metadataStandardVersion>
<gco:CharacterString>${METADATA_STANDARD_VERSION:1.1.0}</gco:CharacterString>
</gmd:metadataStandardVersion>
<gmd:spatialRepresentationInfo>
<gmd:MD_Georectified>
<gmd:numberOfDimensions>
<gco:Integer>2</gco:Integer>
</gmd:numberOfDimensions>
<gmd:axisDimensionProperties>
<gmd:MD_Dimension>
<gmd:dimensionName>
<gmd:MD_DimensionNameTypeCode codeList="http://www.isotc211.org/2005/resources/Codelist/gmxCodelists.xml#MD_DimensionNameTypeCode" codeListValue="row">row</gmd:MD_DimensionNameTypeCode>
</gmd:dimensionName>
<gmd:dimensionSize>
<gco:Integer>${HEIGHT}</gco:Integer>
</gmd:dimensionSize>
<gmd:resolution>
<gco:Measure uom="${RES_UNIT}">${RESY}</gco:Measure>
</gmd:resolution>
</gmd:MD_Dimension>
</gmd:axisDimensionProperties>
<gmd:axisDimensionProperties>
<gmd:MD_Dimension>
<gmd:dimensionName>
<gmd:MD_DimensionNameTypeCode codeList="http://www.isotc211.org/2005/resources/Codelist/gmxCodelists.xml#MD_DimensionNameTypeCode" codeListValue="column">column</gmd:MD_DimensionNameTypeCode>
</gmd:dimensionName>
<gmd:dimensionSize>
<gco:Integer>${WIDTH}</gco:Integer>
</gmd:dimensionSize>
<gmd:resolution>
<gco:Measure uom="${RES_UNIT}">${RESX}</gco:Measure>
</gmd:resolution>
</gmd:MD_Dimension>
</gmd:axisDimensionProperties>
<gmd:cellGeometry>
<gmd:MD_CellGeometryCode codeList="http://www.isotc211.org/2005/resources/Codelist/gmxCodelists.xml#MD_CellGeometryCode" codeListValue="point">point</gmd:MD_CellGeometryCode>
</gmd:cellGeometry>
<gmd:transformationParameterAvailability>
<gco:Boolean>1</gco:Boolean>
</gmd:transformationParameterAvailability>
<gmd:checkPointAvailability>
<gco:Boolean>0</gco:Boolean>
</gmd:checkPointAvailability>
<gmd:cornerPoints>
<gml:Point gml:id="id1">
<gml:coordinates decimal="." cs="," ts=" ">${CORNER_POINTS}</gml:coordinates>
</gml:Point>
</gmd:cornerPoints>
<gmd:pointInPixel>
<gmd:MD_PixelOrientationCode>center</gmd:MD_PixelOrientationCode>
</gmd:pointInPixel>
</gmd:MD_Georectified>
</gmd:spatialRepresentationInfo>
<gmd:referenceSystemInfo>
<gmd:MD_ReferenceSystem>
<gmd:referenceSystemIdentifier>
<gmd:RS_Identifier>
<gmd:code>
<gco:CharacterString>${HORIZ_WKT}</gco:CharacterString>
</gmd:code>
<gmd:codeSpace>
<gco:CharacterString>WKT</gco:CharacterString>
</gmd:codeSpace>
</gmd:RS_Identifier>
</gmd:referenceSystemIdentifier>
</gmd:MD_ReferenceSystem>
</gmd:referenceSystemInfo>
<gmd:referenceSystemInfo>
<gmd:MD_ReferenceSystem>
<gmd:referenceSystemIdentifier>
<gmd:RS_Identifier>
<gmd:code>
<gco:CharacterString>${VERT_WKT:VERT_CS["unknown", VERT_DATUM["unknown", 2000]]}</gco:CharacterString>
</gmd:code>
<gmd:codeSpace>
<gco:CharacterString>WKT</gco:CharacterString>
</gmd:codeSpace>
</gmd:RS_Identifier>
</gmd:referenceSystemIdentifier>
</gmd:MD_ReferenceSystem>
</gmd:referenceSystemInfo>
<gmd:identificationInfo>
<bag:BAG_DataIdentification>
<gmd:citation>${XML_IDENTIFICATION_CITATION:}</gmd:citation>
<gmd:abstract>
<gco:CharacterString>${ABSTRACT:}</gco:CharacterString>
</gmd:abstract>
<gmd:spatialRepresentationType>
<gmd:MD_SpatialRepresentationTypeCode codeList="http://www.isotc211.org/2005/resources/Codelist/gmxCodelists.xml#MD_SpatialRepresentationTypeCode" codeListValue="grid">grid</gmd:MD_SpatialRepresentationTypeCode>
</gmd:spatialRepresentationType>
<gmd:spatialResolution>
<gmd:MD_Resolution>
<gmd:distance>
<gco:Distance uom="${RES_UNIT}">${RES}</gco:Distance>
</gmd:distance>
</gmd:MD_Resolution>
</gmd:spatialResolution>
<gmd:language>
<gmd:LanguageCode codeList="http://www.loc.gov/standards/iso639-2/" codeListValue="eng">eng</gmd:LanguageCode>
</gmd:language>
<gmd:topicCategory>
<gmd:MD_TopicCategoryCode>elevation</gmd:MD_TopicCategoryCode>
</gmd:topicCategory>
<gmd:extent>
<gmd:EX_Extent>
<gmd:geographicElement>
<gmd:EX_GeographicBoundingBox>
<gmd:westBoundLongitude>
<gco:Decimal>${WEST_LONGITUDE}</gco:Decimal>
</gmd:westBoundLongitude>
<gmd:eastBoundLongitude>
<gco:Decimal>${EAST_LONGITUDE}</gco:Decimal>
</gmd:eastBoundLongitude>
<gmd:southBoundLatitude>
<gco:Decimal>${SOUTH_LATITUDE}</gco:Decimal>
</gmd:southBoundLatitude>
<gmd:northBoundLatitude>
<gco:Decimal>${NORTH_LATITUDE}</gco:Decimal>
</gmd:northBoundLatitude>
</gmd:EX_GeographicBoundingBox>
</gmd:geographicElement>
</gmd:EX_Extent>
</gmd:extent>
<bag:verticalUncertaintyType>
<bag:BAG_VertUncertCode codeList="http://www.opennavsurf.org/schema/bag/bagCodelists.xml#BAG_VertUncertCode" codeListValue="${VERTICAL_UNCERT_CODE:unknown}">${VERTICAL_UNCERT_CODE:unknown}</bag:BAG_VertUncertCode>
</bag:verticalUncertaintyType>
</bag:BAG_DataIdentification>
</gmd:identificationInfo>
<gmd:dataQualityInfo>
<gmd:DQ_DataQuality>
<gmd:scope>
<gmd:DQ_Scope>
<gmd:level>
<gmd:MD_ScopeCode codeList="http://www.isotc211.org/2005/resources/Codelist/gmxCodelists.xml#MD_ScopeCode" codeListValue="dataset">dataset</gmd:MD_ScopeCode>
</gmd:level>
</gmd:DQ_Scope>
</gmd:scope>
<gmd:lineage>
<gmd:LI_Lineage>
<gmd:processStep>
<gmd:LI_ProcessStep>
<gmd:description>
<gco:CharacterString>${PROCESS_STEP_DESCRIPTION}</gco:CharacterString>
</gmd:description>
<gmd:dateTime>
<gco:DateTime>${DATETIME}</gco:DateTime>
</gmd:dateTime>
</gmd:LI_ProcessStep>
</gmd:processStep>
</gmd:LI_Lineage>
</gmd:lineage>
</gmd:DQ_DataQuality>
</gmd:dataQualityInfo>
<gmd:metadataConstraints>
<gmd:MD_LegalConstraints>
<gmd:useConstraints>
<gmd:MD_RestrictionCode codeList="http://www.isotc211.org/2005/resources/Codelist/gmxCodelists.xml#MD_RestrictionCode" codeListValue="${RESTRICTION_CODE:otherRestrictions}">${RESTRICTION_CODE:otherRestrictions}</gmd:MD_RestrictionCode>
</gmd:useConstraints>
<gmd:otherConstraints>
<gco:CharacterString>${RESTRICTION_OTHER_CONSTRAINTS:unknown}</gco:CharacterString>
</gmd:otherConstraints>
</gmd:MD_LegalConstraints>
</gmd:metadataConstraints>
<gmd:metadataConstraints>
<gmd:MD_SecurityConstraints>
<gmd:classification>
<gmd:MD_ClassificationCode codeList="http://www.isotc211.org/2005/resources/Codelist/gmxCodelists.xml#MD_ClassificationCode" codeListValue="${CLASSIFICATION:unclassified}">${CLASSIFICATION:unclassified}</gmd:MD_ClassificationCode>
</gmd:classification>
<gmd:userNote>
<gco:CharacterString>${SECURITY_USER_NOTE:none}</gco:CharacterString>
</gmd:userNote>
</gmd:MD_SecurityConstraints>
</gmd:metadataConstraints>
</gmi:MI_Metadata>

View File

@@ -176,6 +176,7 @@
}
},
"size": {
"$comment": "note that the order of items in side is width,height",
"$ref": "#/definitions/arrayOfTwoIntegers"
},
"coordinateSystem": {
@@ -306,6 +307,7 @@
},
"proj:shape": {
"$comment": "note that the order of items in proj:shape is height,width starting with GDAL 3.8.5 (previous versions ordered it wrongly as width,height)",
"title": "Shape",
"type": "array",
"minItems": 2,

View File

@@ -30,30 +30,76 @@
****************************************************************************/
-->
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified" version="1.0">
<xs:element name="VRTDataset">
<xs:complexType>
<xs:sequence>
<xs:choice minOccurs="0" maxOccurs="unbounded">
<xs:element name="SRS" type="SRSType"/>
<xs:element name="GeoTransform" type="xs:string"/>
<xs:element name="GCPList" type="GCPListType"/>
<xs:element name="BlockXSize" type="nonNegativeInteger32"/>
<xs:element name="BlockYSize" type="nonNegativeInteger32"/>
<xs:element name="Metadata" type="MetadataType"/> <!-- may be repeated -->
<xs:element name="VRTRasterBand" type="VRTRasterBandType"/> <!-- may be repeated -->
<xs:element name="MaskBand" type="MaskBandType"/>
<xs:element name="GDALWarpOptions" type="GDALWarpOptionsType"/> <!-- only if subClass="VRTWarpedDataset" -->
<xs:element name="PansharpeningOptions" type="PansharpeningOptionsType"/> <!-- only if subClass="VRTPansharpenedDataset" -->
<xs:element name="Group" type="GroupType"/> <!-- only for multidimensional dataset -->
<xs:element name="OverviewList" type="OverviewListType"/>
</xs:choice>
</xs:sequence>
<xs:attribute name="subClass" type="xs:string"/>
<xs:attribute name="rasterXSize" type="nonNegativeInteger32"/>
<xs:attribute name="rasterYSize" type="nonNegativeInteger32"/>
</xs:complexType>
<xs:element name="VRTDataset" type="VRTDatasetType">
<xs:annotation>
<xs:documentation>Root element</xs:documentation>
</xs:annotation>
</xs:element>
<xs:complexType name="VRTDatasetType">
<xs:sequence>
<xs:choice minOccurs="0" maxOccurs="unbounded">
<xs:element name="SRS" type="SRSType"/>
<xs:element name="GeoTransform" type="xs:string"/>
<xs:element name="GCPList" type="GCPListType"/>
<xs:element name="BlockXSize" type="nonNegativeInteger32"/>
<xs:element name="BlockYSize" type="nonNegativeInteger32"/>
<xs:element name="Metadata" type="MetadataType">
<xs:annotation>
<xs:documentation>May be repeated</xs:documentation>
</xs:annotation>
</xs:element>
<xs:element name="VRTRasterBand" type="VRTRasterBandType">
<xs:annotation>
<xs:documentation>May be repeated</xs:documentation>
</xs:annotation>
</xs:element>
<xs:element name="MaskBand" type="MaskBandType"/>
<xs:element name="GDALWarpOptions" type="GDALWarpOptionsType">
<xs:annotation>
<xs:documentation>Allowed only if subClass="VRTWarpedDataset"</xs:documentation>
</xs:annotation>
</xs:element>
<xs:element name="PansharpeningOptions" type="PansharpeningOptionsType">
<xs:annotation>
<xs:documentation>Allowed only if subClass="VRTPansharpenedDataset"</xs:documentation>
</xs:annotation>
</xs:element>
<xs:element name="Input" type="InputType">
<xs:annotation>
<xs:documentation>Allowed only if subClass="VRTProcessedDataset"</xs:documentation>
</xs:annotation>
</xs:element>
<xs:element name="ProcessingSteps" type="ProcessingStepsType">
<xs:annotation>
<xs:documentation>Allowed only if subClass="VRTProcessedDataset"</xs:documentation>
</xs:annotation>
</xs:element>
<xs:element name="Group" type="GroupType">
<xs:annotation>
<xs:documentation>only for multidimensional dataset</xs:documentation>
</xs:annotation>
</xs:element>
<xs:element name="OverviewList" type="OverviewListType"/>
</xs:choice>
</xs:sequence>
<xs:attribute name="subClass" type="DatasetSubclassType"/>
<xs:attribute name="rasterXSize" type="nonNegativeInteger32"/>
<xs:attribute name="rasterYSize" type="nonNegativeInteger32"/>
</xs:complexType>
<xs:simpleType name="DatasetSubclassType">
<xs:restriction base="xs:string">
<xs:enumeration value="VRTWarpedDataset"/>
<xs:enumeration value="VRTPansharpenedDataset"/>
<xs:enumeration value="VRTProcessedDataset">
<xs:annotation>
<xs:documentation>Added in GDAL 3.9</xs:documentation>
</xs:annotation>
</xs:enumeration>
</xs:restriction>
</xs:simpleType>
<xs:complexType name="OverviewListType">
<xs:simpleContent>
<xs:extension base="integerList">
@@ -138,6 +184,7 @@
<xs:complexType name="PanchroBandType">
<xs:sequence>
<xs:element name="SourceFilename" type="SourceFilenameType"/>
<xs:element name="OpenOptions" type="OpenOptionsType"/>
<xs:element name="SourceBand" type="xs:string"/> <!-- should be refined into xs:nonNegativeInteger or mask,xs:nonNegativeInteger -->
</xs:sequence>
</xs:complexType>
@@ -145,6 +192,7 @@
<xs:complexType name="SpectralBandType">
<xs:sequence>
<xs:element name="SourceFilename" type="SourceFilenameType"/>
<xs:element name="OpenOptions" type="OpenOptionsType"/>
<xs:element name="SourceBand" type="xs:string"/> <!-- should be refined into xs:nonNegativeInteger or mask,xs:nonNegativeInteger -->
</xs:sequence>
<xs:attribute name="dstBand" type="xs:nonNegativeInteger"/>
@@ -156,6 +204,51 @@
</xs:sequence>
</xs:complexType>
<xs:complexType name="InputType">
<xs:sequence>
<xs:choice minOccurs="0" maxOccurs="1">
<xs:element name="SourceFilename" type="SourceFilenameType"/>
<xs:element name="VRTDataset" type="VRTDatasetType"/>
</xs:choice>
</xs:sequence>
</xs:complexType>
<xs:complexType name="ProcessingStepsType">
<xs:sequence minOccurs="1" maxOccurs="unbounded">
<xs:element name="Step" type="ProcessingStepType"/>
</xs:sequence>
</xs:complexType>
<xs:complexType name="ProcessingStepType">
<xs:annotation>
<xs:documentation>Processing step of a VRTProcessedDataset</xs:documentation>
</xs:annotation>
<xs:sequence>
<xs:element name="Algorithm" type="xs:string" minOccurs="1">
<xs:annotation>
<xs:documentation>Builtin allowed names are BandAffineCombination, LUT, LocalScaleOffset, Trimming. More algorithms can be registered at run-time.</xs:documentation>
</xs:annotation>
</xs:element>
<xs:element name="Argument" type="ArgumentType" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="name" type="xs:string"/>
</xs:complexType>
<xs:complexType name="ArgumentType">
<xs:annotation>
<xs:documentation>Argument of a processing function</xs:documentation>
</xs:annotation>
<xs:simpleContent>
<xs:extension base="xs:string">
<xs:attribute name="name" type="xs:string" use="required">
<xs:annotation>
<xs:documentation>Allowed names are specific of each processing function</xs:documentation>
</xs:annotation>
</xs:attribute>
</xs:extension>
</xs:simpleContent>
</xs:complexType>
<xs:complexType name="MDIType">
<xs:simpleContent>
<xs:extension base="xs:string">
@@ -187,7 +280,9 @@
<xs:element name="SimpleSource" type="SimpleSourceType"/>
<xs:element name="ComplexSource" type="ComplexSourceType"/>
<xs:element name="AveragedSource" type="SimpleSourceType"/>
<xs:element name="NoDataFromMaskSource" type="NoDataFromMaskSourceType"/>
<xs:element name="KernelFilteredSource" type="KernelFilteredSourceType"/>
<xs:element name="ArraySource" type="ArraySourceType"/>
<!-- for a VRTDerivedRasterBand -->
<xs:element name="PixelFunctionType" type="xs:string"/>
@@ -230,6 +325,7 @@
<xs:enumeration value="VRTDerivedRasterBand"/>
<xs:enumeration value="VRTRawRasterBand"/>
<xs:enumeration value="VRTPansharpenedRasterBand"/>
<xs:enumeration value="VRTProcessedRasterBand"/>
</xs:restriction>
</xs:simpleType>
@@ -405,6 +501,21 @@
<xs:attribute name="resampling" type="xs:string"/>
</xs:complexType>
<xs:group name="NoDataFromMaskSourceElementsGroup">
<xs:sequence>
<xs:choice minOccurs="0" maxOccurs="unbounded">
<xs:group ref="SimpleSourceElementsGroup"/>
<xs:element name="NODATA" type="DoubleOrNanType"/> <!-- NODATA and UseMaskBand are mutually exclusive -->
<xs:element name="MaskValueThreshold" type="xs:double"/>
<xs:element name="RemappedValue" type="xs:double"/>
</xs:choice>
</xs:sequence>
</xs:group>
<xs:complexType name="NoDataFromMaskSourceType">
<xs:group ref="NoDataFromMaskSourceElementsGroup"/>
</xs:complexType>
<xs:complexType name="KernelFilteredSourceType">
<xs:sequence>
<xs:choice minOccurs="0" maxOccurs="unbounded">
@@ -435,6 +546,31 @@
</xs:restriction>
</xs:simpleType>
<xs:complexType name="ArraySourceType">
<xs:sequence>
<xs:element ref="AbstractArray"/>
<xs:element name="SrcRect" type="RectType" minOccurs="0"/>
<xs:element name="DstRect" type="RectType" minOccurs="0"/>
</xs:sequence>
</xs:complexType>
<xs:element name="AbstractArray" type="AbstractArrayType" abstract="true"/>
<xs:complexType name="AbstractArrayType"/>
<xs:element name="SingleSourceArray" substitutionGroup="AbstractArray" type="SingleSourceArrayType"/>
<xs:complexType name="SingleSourceArrayType">
<xs:complexContent>
<xs:extension base="AbstractArrayType">
<xs:sequence>
<xs:element name="SourceFilename" type="SourceFilenameType"/>
<xs:element name="SourceArray" type="xs:string"/>
</xs:sequence>
</xs:extension>
</xs:complexContent>
</xs:complexType>
<xs:complexType name="SourceFilenameType">
<xs:simpleContent>
<xs:extension base="xs:string">
@@ -606,4 +742,139 @@
<xs:attribute name="ref" type="xs:string" use="required"/>
</xs:complexType>
<xs:element name="DerivedArray" substitutionGroup="AbstractArray" type="DerivedArrayType"/>
<xs:complexType name="DerivedArrayType">
<xs:complexContent>
<xs:extension base="AbstractArrayType">
<xs:sequence>
<xs:element ref="AbstractArray"/>
<xs:element name="Step" type="StepType" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
</xs:extension>
</xs:complexContent>
</xs:complexType>
<xs:complexType name="StepType">
<xs:sequence minOccurs="0" maxOccurs="unbounded">
<xs:element ref="AbstractStep"/>
</xs:sequence>
</xs:complexType>
<xs:element name="AbstractStep" type="AbstractStepType" abstract="true"/>
<xs:complexType name="AbstractStepType"/>
<xs:element name="View" substitutionGroup="AbstractStep" type="ViewType"/>
<xs:complexType name="ViewType">
<xs:complexContent>
<xs:extension base="AbstractStepType">
<xs:attribute name="expr" type="xs:string" use="required"/>
</xs:extension>
</xs:complexContent>
</xs:complexType>
<xs:element name="Transpose" substitutionGroup="AbstractStep" type="TransposeType"/>
<xs:complexType name="TransposeType">
<xs:complexContent>
<xs:extension base="AbstractStepType">
<xs:attribute name="newOrder" type="CommaSeparatedListOfIntegerType" use="required"/>
</xs:extension>
</xs:complexContent>
</xs:complexType>
<xs:simpleType name="CommaSeparatedListOfIntegerType">
<xs:restriction base="xs:string">
        <xs:pattern value="(\d)+(,(\d)+)*"/>
</xs:restriction>
</xs:simpleType>
<xs:element name="Resample" substitutionGroup="AbstractStep" type="ResampleType"/>
<xs:complexType name="ResampleType">
<xs:complexContent>
<xs:extension base="AbstractStepType">
<xs:sequence>
<xs:element name="Dimension" type="DimensionType" minOccurs="0" maxOccurs="unbounded"/>
<xs:element name="ResampleAlg" type="ResampleAlgType" minOccurs="0"/>
<xs:element name="SRS" type="SRSType" minOccurs="0"/>
<xs:element name="Option" type="OptionType" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
</xs:extension>
</xs:complexContent>
</xs:complexType>
<xs:simpleType name="ResampleAlgType">
<xs:restriction base="xs:string">
<xs:enumeration value="NearestNeighbour"/>
<xs:enumeration value="Bilinear"/>
<xs:enumeration value="Cubic"/>
<xs:enumeration value="CubicSpline"/>
<xs:enumeration value="Lanczos"/>
<xs:enumeration value="Average"/>
<xs:enumeration value="RMS"/>
<xs:enumeration value="Mode"/>
<xs:enumeration value="Gauss"/>
</xs:restriction>
</xs:simpleType>
<xs:complexType name="OptionType">
<xs:simpleContent>
<xs:extension base="xs:string">
<xs:attribute name="name" type="xs:string"/>
</xs:extension>
</xs:simpleContent>
</xs:complexType>
<xs:element name="Grid" substitutionGroup="AbstractStep" type="GridType"/>
<xs:complexType name="GridType">
<xs:complexContent>
<xs:extension base="AbstractStepType">
<xs:sequence>
<xs:element name="GridOptions" type="xs:string" minOccurs="1"/>
<xs:element name="XArray" minOccurs="0">
<xs:complexType>
<xs:sequence>
<xs:element ref="AbstractArray"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="YArray" minOccurs="0">
<xs:complexType>
<xs:sequence>
<xs:element ref="AbstractArray"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="Option" type="OptionType" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
</xs:extension>
</xs:complexContent>
</xs:complexType>
<xs:element name="GetMask" substitutionGroup="AbstractStep" type="GetMaskType"/>
<xs:complexType name="GetMaskType">
<xs:complexContent>
<xs:extension base="AbstractStepType">
<xs:sequence>
<xs:element name="Option" type="OptionType" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
</xs:extension>
</xs:complexContent>
</xs:complexType>
<xs:element name="GetUnscaled" substitutionGroup="AbstractStep" type="GetUnscaledType"/>
<xs:complexType name="GetUnscaledType">
<xs:complexContent>
<xs:extension base="AbstractStepType">
<xs:sequence/>
</xs:extension>
</xs:complexContent>
</xs:complexType>
</xs:schema>

View File

@@ -1,169 +0,0 @@
<!-- This file is under the public domain -->
<Configuration xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="gmlasconf.xsd">
<AllowRemoteSchemaDownload>true</AllowRemoteSchemaDownload>
<SchemaCache enabled="true">
<Directory/> <!-- empty: use $HOME/.gdal/gmlas_xsd_cache by default -->
</SchemaCache>
<SchemaAnalysisOptions>
<SchemaFullChecking>true</SchemaFullChecking>
<HandleMultipleImports>false</HandleMultipleImports>
</SchemaAnalysisOptions>
<Validation enabled="false">
<FailIfError>false</FailIfError>
</Validation>
<ExposeMetadataLayers>false</ExposeMetadataLayers>
<LayerBuildingRules>
<AlwaysGenerateOGRId>false</AlwaysGenerateOGRId>
<RemoveUnusedLayers>false</RemoveUnusedLayers>
<RemoveUnusedFields>false</RemoveUnusedFields>
<UseArrays>true</UseArrays>
<UseNullState>false</UseNullState>
<GML>
<IncludeGeometryXML>false</IncludeGeometryXML>
<InstantiateGMLFeaturesOnly>true</InstantiateGMLFeaturesOnly>
</GML>
<!-- 60 for PostgreSQL compatibility. The maximum is 64 but reserve
some space so that the spatial index name can be formed -->
<IdentifierMaxLength>60</IdentifierMaxLength>
<!-- Whether layer and field names should be consider equal in a
case insensitive way. This is important for conversion to
Postgres when identifiers are laundered in lower case -->
<CaseInsensitiveIdentifier>true</CaseInsensitiveIdentifier>
<!-- Launder identifiers like the OGR PG driver does.
Note: this laundering is safe for other backends as well. -->
<PostgreSQLIdentifierLaundering>true</PostgreSQLIdentifierLaundering>
<FlatteningRules>
<!-- Maximum number of fields allowed for element flattening -->
<MaximumNumberOfFields>10</MaximumNumberOfFields>
<Namespaces>
<Namespace prefix="swe" uri="http://www.opengis.net/swe/2.0"/>
</Namespaces>
<!-- Exception to MaximumNumberOfFields:
force this element(s) to be flattened even if they have more elements -->
<ForceFlatteningXPath>swe:values</ForceFlatteningXPath>
<!-- Exception to MaximumNumberOfFields:
prevent this element(s) from being flattened even if they have less elements -->
<!--
<DisableFlatteningXPath>...</DisableFlatteningXPath>
-->
</FlatteningRules>
<SWEProcessing>
<Activation>ifSWENamespaceFoundInTopElement</Activation>
<ProcessDataRecord>true</ProcessDataRecord>
<ProcessDataArray>true</ProcessDataArray>
</SWEProcessing>
</LayerBuildingRules>
<!-- constraints typically expressed as schematrons -->
<TypingConstraints>
<Namespaces>
<Namespace prefix="gwml2w" uri="http://www.opengis.net/gwml-well/2.2"/>
<Namespace prefix="om" uri="http://www.opengis.net/om/2.0"/>
</Namespaces>
<ChildConstraint>
<ContainerXPath>gwml2w:GW_GeologyLog/om:result</ContainerXPath>
<ChildrenElements>
<Element>gwml2w:GW_GeologyLogCoverage</Element>
</ChildrenElements>
</ChildConstraint>
</TypingConstraints>
<XLinkResolution>
<Timeout>10</Timeout> <!-- can be set with GDAL_HTTP_TIMEOUT -->
<!-- <MaxGlobalResolutionTime></MaxGlobalResolutionTime> -->
<MaxFileSize>1048576</MaxFileSize>
<!--
<ProxyServerPort>myproxy.com:8080</ProxyServerPort> Can be set with GDAL_HTTP_PROXY
        <ProxyUserPassword>user:password</ProxyUserPassword>  Can be set with GDAL_HTTP_PROXYUSERPW
<ProxyAuth>Basic or NTLM or Digest or Any</ProxyAuth> Can be set with GDAL_PROXY_AUTH
-->
<CacheDirectory/> <!-- empty: use $HOME/.gdal/gmlas_xlink_resolution_cache by default -->
<DefaultResolution enabled="false">
<AllowRemoteDownload>true</AllowRemoteDownload>
<ResolutionMode>RawContent</ResolutionMode>
<ResolutionDepth>1</ResolutionDepth>
<CacheResults>false</CacheResults>
</DefaultResolution>
<!--
<URLSpecificResolution>
<URLPrefix>http://inspire.ec.europa.eu/codelist</URLPrefix>
<HTTPHeader>
<Name>Accept</Name>
<Value>application/x-iso19135+xml</Value>
</HTTPHeader>
<HTTPHeader>
<Name>Accept-Language</Name>
<Value>en</Value>
</HTTPHeader>
<AllowRemoteDownload>true</AllowRemoteDownload>
<ResolutionMode>FieldsFromXPath</ResolutionMode>
<ResolutionDepth>1</ResolutionDepth>
<CacheResults>true</CacheResults>
<Field>
<Name>name</Name>
<Type>string</Type>
<XPath>RE_RegisterItem/name/gco:CharacterString</XPath>
</Field>
<Field>
<Name>definition</Name>
<Type>string</Type>
<XPath>RE_RegisterItem/definition/gco:CharacterString</XPath>
</Field>
</URLSpecificResolution>
-->
<ResolveInternalXLinks>true</ResolveInternalXLinks>
</XLinkResolution>
<IgnoredXPaths>
<WarnIfIgnoredXPathFoundInDocInstance>true</WarnIfIgnoredXPathFoundInDocInstance>
<Namespaces>
<Namespace prefix="gml" uri="http://www.opengis.net/gml"/>
<Namespace prefix="gml32" uri="http://www.opengis.net/gml/3.2"/>
<Namespace prefix="swe" uri="http://www.opengis.net/swe/2.0"/>
</Namespaces>
<XPath warnIfIgnoredXPathFoundInDocInstance="false">gml:boundedBy</XPath>
<XPath warnIfIgnoredXPathFoundInDocInstance="false">gml32:boundedBy</XPath>
<XPath>gml:priorityLocation</XPath>
<XPath>gml32:priorityLocation</XPath>
<XPath>gml32:descriptionReference/@owns</XPath>
<XPath>@xlink:show</XPath>
<XPath>@xlink:type</XPath>
<XPath>@xlink:role</XPath>
<XPath>@xlink:arcrole</XPath>
<XPath>@xlink:actuate</XPath>
<XPath>@gml:remoteSchema</XPath>
<XPath>@gml32:remoteSchema</XPath>
<XPath>swe:Quantity/swe:extension</XPath>
<XPath>swe:Quantity/@referenceFrame</XPath>
<XPath>swe:Quantity/@axisID</XPath>
<XPath>swe:Quantity/@updatable</XPath>
<XPath>swe:Quantity/@optional</XPath>
<XPath>swe:Quantity/@id</XPath>
<XPath>swe:Quantity/swe:identifier</XPath>
<!-- <XPath>swe:Quantity/@definition</XPath> -->
<XPath>swe:Quantity/swe:label</XPath>
<XPath>swe:Quantity/swe:nilValues</XPath>
<XPath>swe:Quantity/swe:constraint</XPath>
<XPath>swe:Quantity/swe:quality</XPath>
</IgnoredXPaths>
<!-- Section for GMLAS writer config -->
<WriterConfig>
<IndentationSize>2</IndentationSize>
<Comment/>
<LineFormat>NATIVE</LineFormat>
<SRSNameFormat>OGC_URL</SRSNameFormat>
<Wrapping>WFS2_FEATURECOLLECTION</Wrapping>
<!-- <Timestamp></Timestamp> -->
<WFS20SchemaLocation>http://schemas.opengis.net/wfs/2.0/wfs.xsd</WFS20SchemaLocation>
</WriterConfig>
</Configuration>

View File

@@ -1,143 +0,0 @@
<?xml version="1.0"?>
<!--
/******************************************************************************
* $Id$
*
* Project: netCDF driver
* Purpose: Schema of netCDF writer configuration files
* Author: Even Rouault, <even dot rouault at spatialys dot com>
*
**********************************************************************
* Copyright (c) 2016, Even Rouault <even dot rouault at spatialys dot com>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
****************************************************************************/
-->
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">
<xs:element name="Configuration">
<xs:complexType>
<xs:sequence>
<xs:element name="DatasetCreationOption" type="optionType" minOccurs="0" maxOccurs="unbounded"/>
<xs:element name="LayerCreationOption" type="optionType" minOccurs="0" maxOccurs="unbounded">
<xs:annotation><xs:documentation>
Define a layer creation option that applies to all layers.
</xs:documentation></xs:annotation>
</xs:element>
<xs:element name="Attribute" type="attributeType" minOccurs="0" maxOccurs="unbounded">
<xs:annotation><xs:documentation>
Define a global attribute that must be written (or removed) and applies to all layers.
</xs:documentation></xs:annotation>
</xs:element>
<xs:element name="Field" type="fieldType" minOccurs="0" maxOccurs="unbounded">
<xs:annotation><xs:documentation>
Define the characteristics of an OGR field / netCDF variable that applies to all layers (that actually uses it)
</xs:documentation></xs:annotation>
</xs:element>
<xs:element name="Layer" type="layerType" minOccurs="0" maxOccurs="unbounded">
<xs:annotation><xs:documentation>
                    Define layer specific settings for layer creation options, fields and attributes.
</xs:documentation></xs:annotation>
</xs:element>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:complexType name="optionType">
<xs:attribute name="name" type="xs:string"/>
<xs:attribute name="value" type="xs:string"/>
</xs:complexType>
<xs:complexType name="attributeType">
<xs:attribute name="name" type="xs:string"/>
<xs:attribute name="value" type="xs:string">
<xs:annotation><xs:documentation>
Value to set as attribute, or empty string
to delete an existing attribute
</xs:documentation></xs:annotation>
</xs:attribute>
<xs:attribute name="type" use="optional">
<xs:simpleType>
<xs:restriction base="xs:string">
<xs:enumeration value="string"/>
<xs:enumeration value="integer"/>
<xs:enumeration value="double"/>
</xs:restriction>
</xs:simpleType>
</xs:attribute>
</xs:complexType>
<xs:complexType name="fieldType">
<xs:sequence>
<xs:element name="Attribute" type="attributeType" minOccurs="0" maxOccurs="unbounded">
<xs:annotation><xs:documentation>
Define an attribute that must be written (or removed) from a OGR field / netCDF variable.
</xs:documentation></xs:annotation>
</xs:element>
</xs:sequence>
<xs:attribute name="name" type="xs:string" use="optional">
<xs:annotation><xs:documentation>OGR field name.</xs:documentation></xs:annotation>
</xs:attribute>
<xs:attribute name="netcdf_name" type="xs:string" use="optional">
<xs:annotation><xs:documentation>netCDF variable name. When both name
and netcdf_name are set, the OGR field {name} will be written as the
netCDF {netcdf_name} variable. When netcdf_name is set, but name is none,
then the Field definition will match an implicitly created netCDF variable,
such as x/lon, y/lat, z, ...
</xs:documentation></xs:annotation>
</xs:attribute>
<xs:attribute name="main_dim" type="xs:string" use="optional">
<xs:annotation><xs:documentation>
Name of the main dimension against which the variable must be indexed.
If not set, the record dimension will be used. Only useful when using
a layer with FeatureType!=Point.
</xs:documentation></xs:annotation>
</xs:attribute>
</xs:complexType>
<xs:complexType name="layerType">
<xs:sequence>
<xs:element name="LayerCreationOption" type="optionType" minOccurs="0" maxOccurs="unbounded">
<xs:annotation><xs:documentation>
Define a layer creation option. Overrides or appended to
existing global layer creation options.
</xs:documentation></xs:annotation>
</xs:element>
<xs:element name="Attribute" type="attributeType" minOccurs="0" maxOccurs="unbounded">
<xs:annotation><xs:documentation>
Define a global attribute that must be written (or removed).
Overrides or appended to existing global attributes.
</xs:documentation></xs:annotation>
</xs:element>
<xs:element name="Field" type="fieldType" minOccurs="0" maxOccurs="unbounded">
<xs:annotation><xs:documentation>
Define the characteristics of an OGR field / netCDF variable
(that must exist as an explicit OGR field, or an implicitly created netCDF variable).
Supersedes global Field definition.
</xs:documentation></xs:annotation>
</xs:element>
</xs:sequence>
<xs:attribute name="name" type="xs:string">
<xs:annotation><xs:documentation>OGR layer name.</xs:documentation></xs:annotation>
</xs:attribute>
<xs:attribute name="netcdf_name" type="xs:string" use="optional">
<xs:annotation><xs:documentation>netCDF group name.</xs:documentation></xs:annotation>
</xs:attribute>
</xs:complexType>
</xs:schema>

View File

@@ -149,6 +149,10 @@
},
"comment": {
"type": "string"
},
"timezone": {
"type": "string",
"pattern": "^(localtime|(mixed timezones)|UTC|((\\+|-)[0-9][0-9]:[0-9][0-9]))$"
}
},
"required": [
@@ -208,8 +212,26 @@
"maxItems": 4
}
},
"extent3D": {
"type": "array",
"items": {
"type": [
"null",
"number"
],
"minItems": 6,
"maxItems": 6
}
},
"coordinateSystem": {
"$ref": "#/definitions/coordinateSystem"
"oneOf": [
{
"type": "null"
},
{
"$ref": "#/definitions/coordinateSystem"
}
]
},
"supportedSRSList": {
"type": "array",
@@ -243,6 +265,18 @@
}
]
}
},
"xyCoordinateResolution": {
"type": "number"
},
"zCoordinateResolution": {
"type": "number"
},
"mCoordinateResolution": {
"type": "number"
},
"coordinatePrecisionFormatSpecificOptions": {
"type": "object"
}
},
"required": [

View File

@@ -393,6 +393,9 @@
<xs:element name="SrcRegion" type="SrcRegionType"/>
<xs:element name="SRS" type="nonEmptyStringType"/>
<xs:group ref="ExtentType"/>
<xs:element name="XYResolution" type="xs:double" minOccurs="0" maxOccurs="1"/>
<xs:element name="ZResolution" type="xs:double" minOccurs="0" maxOccurs="1"/>
<xs:element name="MResolution" type="xs:double" minOccurs="0" maxOccurs="1"/>
</xs:choice>
</xs:sequence>
<xs:attributeGroup ref="GeometryFieldTypeAttrGroupWithoutSrc"/>

View File

@@ -1,67 +1,76 @@
!
! By email on December 2nd, 2010:
! From https://github.com/OSGeo/gdal/issues/8034, June 30, 2023
!
! I, Louis Burry, on behalf of PCI Geomatics agree to allow the ellips.txt
! I, Michael Goldberg, on behalf of PCI Geomatics agree to allow the ellips.txt
! and datum.txt file to be distributed under the GDAL open source license.
!
! Louis Burry
! VP Technology & Delivery
! Michael Goldberg
! Development Manager
! PCI Geomatics
!
! NOTE: The range of "D900" to "D998" is set aside for
!
! NOTE: The range of "D950" to "D998" is set aside for
! the use of local customer development.
!
! And the range of "D-90" to "D-98" is set aside for
! the use of local customer development.
!
!For datums using a grid shift file entries are:
!DatumNumber,DatumName,EllipsoidNumber,Location,GridShiftTo,GridShiftFile,GridShiftFile
!If GridShiftTo is negative the shift is reversed
!For datums not using a grid shift file converting to WGS84 using coordinate frame rotation
! (EPSG:9607 which is opposite rotation to EPSG TOWGS84) entries are:
!DatumNumber,DatumName,EllipsoidNumber,XOffset,YOffset,ZOffset,Location,XSigma,YSigma,ZSigma,Doppler,XRotate,YRotate,ZRotate,Scale
"DoD World Geodetic System 1984, DMA TR 8350.2"
"4 JUL 1997, Third Printing, Includes 3 JAN 2000 Updates"
"D-01","NAD27 (USA, NADCON)","E000","Conterminous U.S.","conus.los","conus.las"
"D-02","NAD83 (USA, NADCON)","E008","Conterminous U.S.","conus.los","conus.las"
"D-03","NAD27 (Canada, NTv1)","E000","Canada","grid.dac"
"D-04","NAD83 (Canada, NTv1)","E008","Canada","grid.dac"
"D-07","NAD27 (USA, NADCON)","E000","Alaska","alaska.los","alaska.las"
"D-08","NAD83 (USA, NADCON)","E008","Alaska","alaska.los","alaska.las"
"D-09","NAD27 (USA, NADCON)","E000","St. George","stgeorge.los","stgeorge.las"
"D-10","NAD83 (USA, NADCON)","E008","St. George","stgeorge.los","stgeorge.las"
"D-11","NAD27 (USA, NADCON)","E000","St. Lawrence","stlrnc.los","stlrnc.las"
"D-12","NAD83 (USA, NADCON)","E008","St. Lawrence","stlrnc.los","stlrnc.las"
"D-13","NAD27 (USA, NADCON)","E000","St. Paul","stpaul.los","stpaul.las"
"D-14","NAD83 (USA, NADCON)","E008","St. Paul","stpaul.los","stpaul.las"
"D-15","Old Hawaiian (USA, NADCON)","E000","Hawaii","hawaii.los","hawaii.las"
"D-16","NAD83 (USA, NADCON)","E008","Hawaii","hawaii.los","hawaii.las"
"D-17","NAD27 (USA, NADCON)","E000","Puerto Rico Virgin Islands","prvi.los","prvi.las"
"D-18","NAD83 (USA, NADCON)","E008","Puerto Rico Virgin Islands","prvi.los","prvi.las"
!"D-19","AGD66 (NTv2)","E014","Australia","A66 National (13.09.01).gsb"
!"D-20","AGD84 (NTv2)","E014","Australia","National 84 (02.07.01).gsb"
!"D-21","GDA94 (from AGD66, NTv2)","E008","Australia","A66 National (13.09.01).gsb"
!"D-22","GDA94 (from AGD84, NTv2)","E008","Australia","National 84 (02.07.01).gsb"
!"D-23","NZGD49 (NTv2)","E004","New Zealand","nzgd2kgrid0005.gsb"
!"D-24","NZGD2000 (NTv2)","E008","New Zealand","nzgd2kgrid0005.gsb"
!"D-66","NAD27 (NTv2)","E000","Quebec","na27scrs.gsb"
!"D-67","NAD83 (SCRS) (NTv2)","E008","Quebec","na27scrs.gsb"
!"D-68","NAD27 (NTv2)","E000","Quebec","na27na83.gsb"
!"D-69","NAD83 (NTv2)","E008","Quebec","na27na83.gsb"
!"D-70","NAD27 (CGQ77) (NTv2)","E000","Quebec","cq77scrs.gsb"
!"D-71","NAD83 (SCRS) (NTv2)","E008","Quebec","cq77scrs.gsb"
!"D-72","NAD27 (CGQ77) (NTv2)","E000","Quebec","cq77na83.gsb"
!"D-73","NAD83 (NTv2)","E008","Quebec","cq77na83.gsb"
!"D-74","NAD83 (NTv2)","E008","Quebec","na83scrs.gsb"
!"D-75","NAD83 (SCRS) (NTv2)","E008","Quebec","na83scrs.gsb"
!"D-76","NAD27 (NTv2)","E000","Saskatchewan","sk27-98.gsb"
!"D-77","NAD83 (CSRS98) (NTv2)","E008","Saskatchewan","sk27-98.gsb"
!"D-78","NAD83 (NTv2)","E008","Saskatchewan","sk83-98.gsb"
!"D-79","NAD83 (CSRS98) (NTv2)","E008","Saskatchewan","sk83-98.gsb"
!"D-80","ATS77 (NTv2)","E910","Nova Scotia","ns778301.gsb"
!"D-81","NAD83 (CSRS98) (NTv2)","E008","Nova Scotia","ns778301.gsb"
!"D-82","ATS77 (NTv2)","E910","Prince Edward Island","pe7783v2.gsb"
!"D-83","NAD83 (CSRS98) (NTv2)","E008","Prince Edward Island","pe7783v2.gsb"
!"D-84","ATS77 (NTv2)","E910","New Brunswick","nb7783v2.gsb"
!"D-85","NAD83 (CSRS98) (NTv2)","E008","New Brunswick","nb7783v2.gsb"
!"D-86","NAD27 (NTv2)","E000","Canada","ntv2_0.gsb"
!"D-87","NAD83 (NTv2)","E008","Canada","ntv2_0.gsb"
!"D-88","NAD27 (1976) (NTv2)","E000","Ontario","may76v20.gsb"
!"D-89","NAD83 (NTv2)","E008","Ontario","may76v20.gsb"
"D-01","NAD27 (USA, NADCON)","E000","Conterminous U.S.","D122","conus.los","conus.las"
"D-02","NAD83 (Deprecated - use D122)","E008",0,0,0,"Conterminous U.S.",2,2,2,354
"D-03","NAD27 (Canada, NTv1)","E000","Canada","D122","grid.dac"
"D-04","NAD83 (Deprecated - use D122)","E008",0,0,0,"Canada",2,2,2,354
"D-07","NAD27 (USA, NADCON)","E000","Alaska","D122","alaska.los","alaska.las"
"D-08","NAD83 (Deprecated - use D122)","E008",0,0,0,"Alaska",2,2,2,354
"D-09","NAD27 (USA, NADCON)","E000","St. George","D122","stgeorge.los","stgeorge.las"
"D-10","NAD83 (Deprecated - use D122)","E008",0,0,0,"St. George",2,2,2,354
"D-11","NAD27 (USA, NADCON)","E000","St. Lawrence","D122","stlrnc.los","stlrnc.las"
"D-12","NAD83 (Deprecated - use D122)","E008",0,0,0,"St. Lawrence",2,2,2,354
"D-13","NAD27 (USA, NADCON)","E000","St. Paul","D122","stpaul.los","stpaul.las"
"D-14","NAD83 (Deprecated - use D122)","E008",0,0,0,"St. Paul",2,2,2,354
"D-15","Old Hawaiian (USA, NADCON)","E000","Hawaii","D122","hawaii.los","hawaii.las"
"D-16","NAD83 (Deprecated - use D122)","E008",0,0,0,"Hawaii",2,2,2,354
"D-17","NAD27 (USA, NADCON)","E000","Puerto Rico Virgin Islands","D122","prvi.los","prvi.las"
"D-18","NAD83 (Deprecated - use D122)","E008",0,0,0,"Puerto Rico Virgin Islands",2,2,2,354
"D-21","GDA94 (from AGD66, NTv2)","E008","Australia","D029","A66_National_13_09_01_.gsb"
"D-22","GDA94 (from AGD84, NTv2)","E008","Australia","D030","National_84_02.07.01.gsb"
"D-24","NZGD2000 (NTv2)","E008","New Zealand","D510","nzgd2kgrid0005.gsb"
"D-25","GDA2020 (conformal, from GDA94, NTv2)","E008","Australia","D536","GDA94_GDA2020_conformal.gsb"
"D-26","GDA2020 (conformal and distortion, from GDA94, NTv2)","E008","Australia","D536","GDA94_GDA2020_conformal_and_distortion.gsb"
"D-27","GDA2020 (conformal, from GDA94, NTv2)","E008","Australia (Christmas Island)","D536","GDA94_GDA2020_conformal_christmas_island.gsb"
"D-28","GDA2020 (conformal, from GDA94, NTv2)","E008","Australia (Cocos Islands)","D536","GDA94_GDA2020_conformal_cocos_island.gsb"
"D-55","NAD83 (CSRS 2002) (NTv2)","E008","British Columbia","D122","BC_93_05.gsb"
"D-56","NAD27 (NTv2)","E000","British Columbia","-D-55","BC_27_05.gsb"
"D-57","NAD83 (CSRS) (NTv2)","E008","BC (CRD)","D122","CRD93_00.gsb"
"D-58","NAD27 (NTv2)","E000","BC (CRD)","-D-57","CRD27_00.gsb"
"D-59","NAD83 (CSRS) (NTv2)","E008","BC (Vancouver Island)","D122","NVI93_05.gsb"
"D-62","NAD27 (NTv2)","E000","Ontario (Toronto)","-D-65","TO27CSv1.gsb"
"D-63","NAD27 (NTv2)","E000","Ontario","-D-65","ON27CSv1.gsb"
"D-64","NAD27 (1976) (NTv2)","E000","Ontario","-D-65","ON76CSv1.gsb"
"D-65","NAD83 (CSRS98) (NTv2)","E008","Ontario","D122","ON83CSv1.gsb"
"D-67","NAD83 (SCRS) (NTv2)","E008","Quebec","D-68","na27scrs.gsb"
"D-68","NAD27 (NTv2)","E000","Quebec","-D122","na27na83.gsb"
"D-71","NAD83 (SCRS) (NTv2)","E008","Quebec","D-72","cq77scrs.gsb"
"D-72","NAD27 (CGQ77) (NTv2)","E000","Quebec","D122","cq77na83.gsb"
"D-75","NAD83 (SCRS) (NTv2)","E008","Quebec","D122","na83scrs.gsb"
"D-76","NAD27 (NTv2)","E000","Saskatchewan","-D-79","sk27-98.gsb"
"D-77","NAD27 (NTv2)","E000","Saskatchewan","-D122","sk27-83.gsb"
"D-79","NAD83 (CSRS98) (NTv2)","E008","Saskatchewan","-D122","sk83-98.gsb"
"D-81","NAD83 (CSRS98) (NTv2)","E008","Nova Scotia","D895","ns778301.gsb"
"D-82","ATS77 (NTv2)","E910","Nova Scotia","-D122","GS7783.GSB"
"D-83","NAD83 (CSRS98) (NTv2)","E008","Prince Edward Island","D895","pe7783v2.gsb"
"D-84","NAD83 (CSRS98) (NTv2)","E008","New Brunswick","D122","nb2783v2.gsb"
"D-85","NAD83 (CSRS98) (NTv2)","E008","New Brunswick","D895","nb7783v2.gsb"
"D-86","NAD27 (NTv2)","E000","Canada","-D122","ntv2_0.gsb"
"D-87","NAD83 (CSRS98) (NTv2)","E008","Alberta","D122","ABCSRSV4.DAC"
"D-88","NAD27 (1976) (NTv2)","E000","Ontario","D122","may76v20.gsb"
"D800","Normal Sphere","E019",0,0,0,"",0,0,0,0
"D000","WGS 1984","E012",0,0,0,"Global Definition",0,0,0,0
"D001","WGS 1972","E005",0,0,0,"Global Definition",3,3,3,1
@@ -215,7 +224,7 @@
"D149","Provisional S. American 1956","E004",-295,173,-371,"Venezuela",9,14,15,24
"D150","Provisional S. Chilean 1963","E004",16,196,93,"Chile (South, Near 53dS) (Hito XVIII)",25,25,25,2
"D151","Puerto Rico","E000",11,72,-101,"Puerto Rico, Virgin Islands",3,3,3,11
"D152","Qatar National","E004",-128,-283,22,"Qatar",20,20,20,3
"D152","Qatar National Datum 1995","E004",-127.78098,-283.37477,21.24081,"Qatar",20,20,20,3
"D153","Qornoq","E004",164,138,-189,"Greenland (South)",25,25,32,2
"D154","Reunion","E004",94,-948,-1262,"Mascarene Islands",25,25,25,1
"D155","Rome 1940","E004",-225,-65,9,"Italy (Sardinia)",25,25,25,1
@@ -225,7 +234,7 @@
"D159","Schwarzeck","E900",616,97,-251,"Namibia",20,20,20,3
"D160","Selvagem Grande 1938","E004",-289,-124,60,"Salvage Islands",25,25,25,1
"D161","SGS 85","E905",3,9,-9,"Soviet Geodetic System 1985",10,10,10,1
"D162","South American 1969","E907",-57,1,-41,"MEAN Solution,",15,6,9,84
"D162","South American 1969 (SAD69)","E907",-57,1,-41,"MEAN Solution,",15,6,9,84
"D163","South American 1969","E907",-62,-1,-37,"Argentina",5,5,5,10
"D164","South American 1969","E907",-61,2,-48,"Bolivia",15,15,15,4
"D165","South American 1969 (old)","E907",-60,-2,-41,"Brazil",3,5,5,22
@@ -316,7 +325,7 @@
"D516","SL datum 1999","E006",-0.2933,766.9499,87.7131,"Sri Lanka",0,0,0,0,-0.1957040,-1.6950677,-3.4730161,-0.0393
"D517","Cape (Supercedes D040)","E205",-134.73,-110.92,-292.66,"South Africa",0,0,0,0
"D518","Hartebeesthoek94","E012",0,0,0,"South Africa",0,0,0,0
"D519","Abidjan 1987","E001",-124.76,53,466.79,"C\uffffte d'Ivoire",0,0,0,0
"D519","Abidjan 1987","E001",-124.76,53,466.79,"Cote d'Ivoire",0,0,0,0
"D520","Accra","E204",-199,32,322,"Ghana",0,0,0,0
"D521","Azores Central 1948","E004",-104,167,-38,"Azores",0,0,0,0
"D522","Azores Oriental 1940","E004",-203,141,53,"Azores",0,0,0,0
@@ -351,113 +360,171 @@
"D600","D-PAF (Orbits)","E600",0.082,-0.502,-0.224,"Satellite Orbits",0,0,0,0,0.30444,0.04424,0.00609,0.9999999937
"D601","Test Data Set 1","E601",0.071,-0.509,-0.166,"Test 1",0,0,0,0,0.0179,-0.0005,0.0067,0.999999983
"D602","Test Data Set 2","E602",580.0,80.9,399.8,"Test 2",0,0,0,0,0.35,0.1,3.026,1.0000113470025
"D610","US Standard Datum (USA, NADCON5)","E000","Conterminous U.S.","D611","nadcon5.ussd.nad27.conus.lon.trn.20160901.b","nadcon5.ussd.nad27.conus.lat.trn.20160901.b"
"D611","NAD27 (USA, NADCON5)","E000","Conterminous U.S.","D122","nadcon5.nad27.nad83_1986.conus.lon.trn.20160901.b","nadcon5.nad27.nad83_1986.conus.lat.trn.20160901.b"
"D612","NAD83 (HARN) (USA, NADCON5)","E008","Conterminous U.S.","-D122","nadcon5.nad83_1986.nad83_harn.conus.lon.trn.20160901.b","nadcon5.nad83_1986.nad83_harn.conus.lat.trn.20160901.b"
"D613","NAD83 (FBN) (USA, NADCON5)","E008","Conterminous U.S.","-D612","nadcon5.nad83_harn.nad83_fbn.conus.lon.trn.20160901.b","nadcon5.nad83_harn.nad83_fbn.conus.lat.trn.20160901.b","nadcon5.nad83_harn.nad83_fbn.conus.eht.trn.20160901.b"
"D614","NAD83 (NSRS 2007) (USA, NADCON5)","E008","Conterminous U.S.","-D613","nadcon5.nad83_fbn.nad83_2007.conus.lon.trn.20160901.b","nadcon5.nad83_fbn.nad83_2007.conus.lat.trn.20160901.b","nadcon5.nad83_fbn.nad83_2007.conus.eht.trn.20160901.b"
"D615","NAD83 (2011) (USA, NADCON5)","E008","Conterminous U.S.","-D614","nadcon5.nad83_2007.nad83_2011.conus.lon.trn.20160901.b","nadcon5.nad83_2007.nad83_2011.conus.lat.trn.20160901.b","nadcon5.nad83_2007.nad83_2011.conus.eht.trn.20160901.b"
"D620","Puerto Rico Datum, adjustment of 1940 (USA, NADCON5)","E000","Puerto Rico, Virgin Islands","D122","nadcon5.pr40.nad83_1986.prvi.lon.trn.20160901.b","nadcon5.pr40.nad83_1986.prvi.lat.trn.20160901.b"
"D621","NAD83 (1993) (USA, NADCON5)","E008","Puerto Rico, Virgin Islands","-D122","nadcon5.nad83_1986.nad83_1993.prvi.lon.trn.20160901.b","nadcon5.nad83_1986.nad83_1993.prvi.lat.trn.20160901.b"
"D622","NAD83 (1997) (USA, NADCON5)","E008","Puerto Rico, Virgin Islands","-D621","nadcon5.nad83_1993.nad83_1997.prvi.lon.trn.20160901.b","nadcon5.nad83_1993.nad83_1997.prvi.lat.trn.20160901.b","nadcon5.nad83_1993.nad83_1997.prvi.eht.trn.20160901.b"
"D623","NAD83 (2002) (USA, NADCON5)","E008","Puerto Rico, Virgin Islands","-D622","nadcon5.nad83_1997.nad83_2002.prvi.lon.trn.20160901.b","nadcon5.nad83_1997.nad83_2002.prvi.lat.trn.20160901.b","nadcon5.nad83_1997.nad83_2002.prvi.eht.trn.20160901.b"
"D624","NAD83 (NSRS 2007) (USA, NADCON5)","E008","Puerto Rico, Virgin Islands","-D623","nadcon5.nad83_2002.nad83_2007.prvi.lon.trn.20160901.b","nadcon5.nad83_2002.nad83_2007.prvi.lat.trn.20160901.b","nadcon5.nad83_2002.nad83_2007.prvi.eht.trn.20160901.b"
"D625","NAD83 (2011) (USA, NADCON5)","E008","Puerto Rico, Virgin Islands","-D624","nadcon5.nad83_2007.nad83_2011.prvi.lon.trn.20160901.b","nadcon5.nad83_2007.nad83_2011.prvi.lat.trn.20160901.b","nadcon5.nad83_2007.nad83_2011.prvi.eht.trn.20160901.b"
"D630","Old Hawaiian Datum (USA, NADCON5)","E000","Hawaii","D122","nadcon5.ohd.nad83_1986.hawaii.lon.trn.20160901.b","nadcon5.ohd.nad83_1986.hawaii.lat.trn.20160901.b"
"D631","NAD83 (1993) (USA, NADCON5)","E008","Hawaii","-D122","nadcon5.nad83_1986.nad83_1993.hawaii.lon.trn.20160901.b","nadcon5.nad83_1986.nad83_1993.hawaii.lat.trn.20160901.b"
"D632","NAD83 (PA11) (USA, NADCON5)","E008","Hawaii","-D631","nadcon5.nad83_1993.nad83_pa11.hawaii.lon.trn.20160901.b","nadcon5.nad83_1993.nad83_pa11.hawaii.lat.trn.20160901.b","nadcon5.nad83_1993.nad83_pa11.hawaii.eht.trn.20160901.b"
"D640","NAD27 (USA, NADCON5)","E000","Alaska","D122","nadcon5.nad27.nad83_1986.alaska.lon.trn.20160901.b","nadcon5.nad27.nad83_1986.alaska.lat.trn.20160901.b"
"D641","NAD83 (1992) (USA, NADCON5)","E008","Alaska","-D122","nadcon5.nad83_1986.nad83_1992.alaska.lon.trn.20160901.b","nadcon5.nad83_1986.nad83_1992.alaska.lat.trn.20160901.b"
"D642","NAD83 (NSRS 2007) (USA, NADCON5)","E008","Alaska","-D641","nadcon5.nad83_1992.nad83_2007.alaska.lon.trn.20160901.b","nadcon5.nad83_1992.nad83_2007.alaska.lat.trn.20160901.b","nadcon5.nad83_1992.nad83_2007.alaska.eht.trn.20160901.b"
"D643","NAD83 (2011) (USA, NADCON5)","E008","Alaska","-D642","nadcon5.nad83_2007.nad83_2011.alaska.lon.trn.20160901.b","nadcon5.nad83_2007.nad83_2011.alaska.lat.trn.20160901.b","nadcon5.nad83_2007.nad83_2011.alaska.eht.trn.20160901.b"
"D650","St. Paul 1897 (USA, NADCON5)","E000","St. Paul, Alaska","D651","nadcon5.sp1897.sp1952.stpaul.lon.trn.20160901.b","nadcon5.sp1897.sp1952.stpaul.lat.trn.20160901.b"
"D651","St. Paul 1952 (USA, NADCON5)","E000","St. Paul, Alaska","D122","nadcon5.sp1952.nad83_1986.stpaul.lon.trn.20160901.b","nadcon5.sp1952.nad83_1986.stpaul.lat.trn.20160901.b"
"D652","St. George 1897 (USA, NADCON5)","E000","St. George, Alaska","D653","nadcon5.sg1897.sg1952.stgeorge.lon.trn.20160901.b","nadcon5.sg1897.sg1952.stgeorge.lat.trn.20160901.b"
"D653","St. George 1952 (USA, NADCON5)","E000","St. George, Alaska","D122","nadcon5.sg1952.nad83_1986.stgeorge.lon.trn.20160901.b","nadcon5.sg1952.nad83_1986.stgeorge.lat.trn.20160901.b"
"D654","St. Lawrence 1952 (USA, NADCON5)","E000","St. Lawrence, Alaska","D122","nadcon5.sl1952.nad83_1986.stlawrence.lon.trn.20160901.b","nadcon5.sl1952.nad83_1986.stlawrence.lat.trn.20160901.b"
"D660","American Samoa 1962 (USA, NADCON5)","E000","American Samoa","D122","nadcon5.as62.nad83_1993.as.lon.trn.20160901.b","nadcon5.as62.nad83_1993.as.lat.trn.20160901.b"
"D661","NAD83 (2002) (USA, NADCON5)","E008","American Samoa","-D122","nadcon5.nad83_1993.nad83_2002.as.lon.trn.20160901.b","nadcon5.nad83_1993.nad83_2002.as.lat.trn.20160901.b","nadcon5.nad83_1993.nad83_2002.as.eht.trn.20160901.b"
"D662","NAD83 (PA11) (USA, NADCON5)","E008","American Samoa","-D661","nadcon5.nad83_2002.nad83_pa11.as.lon.trn.20160901.b","nadcon5.nad83_2002.nad83_pa11.as.lat.trn.20160901.b","nadcon5.nad83_2002.nad83_pa11.as.eht.trn.20160901.b"
"D670","Guam 1963 (USA, NADCON5)","E000","Guam and the Commonwealth of the Northern Mariana Islands","D122","nadcon5.gu63.nad83_1993.guamcnmi.lon.trn.20160901.b","nadcon5.gu63.nad83_1993.guamcnmi.lat.trn.20160901.b"
"D671","NAD83 (2002) (USA, NADCON5)","E008","Guam and the Commonwealth of the Northern Mariana Islands","-D122","nadcon5.nad83_1993.nad83_2002.guamcnmi.lon.trn.20160901.b","nadcon5.nad83_1993.nad83_2002.guamcnmi.lat.trn.20160901.b","nadcon5.nad83_1993.nad83_2002.guamcnmi.eht.trn.20160901.b"
"D672","NAD83 (MA11) (USA, NADCON5)","E008","Guam and the Commonwealth of the Northern Mariana Islands","-D671","nadcon5.nad83_2002.nad83_ma11.guamcnmi.lon.trn.20160901.b","nadcon5.nad83_2002.nad83_ma11.guamcnmi.lat.trn.20160901.b","nadcon5.nad83_2002.nad83_ma11.guamcnmi.eht.trn.20160901.b"
"D700","MODIS","E700",0,0,0,"Global Definition",0,0,0,0
"D701","NAD83 (USA, NADCON)","E008","Alabama","alhpgn.los","alhpgn.las"
"D702","NAD83 HARN (USA, NADCON)","E008","Alabama","alhpgn.los","alhpgn.las"
"D703","NAD83 (USA, NADCON)","E008","Arkansas","arhpgn.los","arhpgn.las"
"D704","NAD83 HARN (USA, NADCON)","E008","Arkansas","arhpgn.los","arhpgn.las"
"D705","NAD83 (USA, NADCON)","E008","Arizona","azhpgn.los","azhpgn.las"
"D706","NAD83 HARN (USA, NADCON)","E008","Arizona","azhpgn.los","azhpgn.las"
"D707","NAD83 (USA, NADCON)","E008","California (North of 37dN)","cnhpgn.los","cnhpgn.las"
"D708","NAD83 HARN (USA, NADCON)","E008","California (North of 37dN)","cnhpgn.los","cnhpgn.las"
"D709","NAD83 (USA, NADCON)","E008","California (South of 37dN)","cshpgn.los","cshpgn.las"
"D710","NAD83 HARN (USA, NADCON)","E008","California (South of 37dN)","cshpgn.los","cshpgn.las"
"D711","NAD83 (USA, NADCON)","E008","Colorado","cohpgn.los","cohpgn.las"
"D712","NAD83 HARN (USA, NADCON)","E008","Colorado","cohpgn.los","cohpgn.las"
"D713","NAD83 (USA, NADCON)","E008","Florida","flhpgn.los","flhpgn.las"
"D714","NAD83 HARN (USA, NADCON)","E008","Florida","flhpgn.los","flhpgn.las"
"D715","NAD83 (USA, NADCON)","E008","Georgia","gahpgn.los","gahpgn.las"
"D716","NAD83 HARN (USA, NADCON)","E008","Georgia","gahpgn.los","gahpgn.las"
"D717","Guam 1963 (USA, NADCON)","E000","Guam","guhpgn.los","guhpgn.las"
"D718","NAD83 HARN (USA, NADCON)","E008","Guam","guhpgn.los","guhpgn.las"
"D719","NAD83 (USA, NADCON)","E008","Hawaii","hihpgn.los","hihpgn.las"
"D720","NAD83 HARN (USA, NADCON)","E008","Hawaii","hihpgn.los","hihpgn.las"
"D721","NAD83 (USA, NADCON)","E008","Idaho-Montana (East of 113dW)","emhpgn.los","emhpgn.las"
"D722","NAD83 HARN (USA, NADCON)","E008","Idaho-Montana (East of 113dW)","emhpgn.los","emhpgn.las"
"D723","NAD83 (USA, NADCON)","E008","Idaho-Montana (West of 113dW)","wmhpgn.los","wmhpgn.las"
"D724","NAD83 HARN (USA, NADCON)","E008","Idaho-Montana (West of 113dW)","wmhpgn.los","wmhpgn.las"
"D725","NAD83 (USA, NADCON)","E008","Iowa","iahpgn.los","iahpgn.las"
"D726","NAD83 HARN (USA, NADCON)","E008","Iowa","iahpgn.los","iahpgn.las"
"D727","NAD83 (USA, NADCON)","E008","Illinois","ilhpgn.los","ilhpgn.las"
"D728","NAD83 HARN (USA, NADCON)","E008","Illinois","ilhpgn.los","ilhpgn.las"
"D729","NAD83 (USA, NADCON)","E008","Indiana","inhpgn.los","inhpgn.las"
"D730","NAD83 HARN (USA, NADCON)","E008","Indiana","inhpgn.los","inhpgn.las"
"D731","NAD83 (USA, NADCON)","E008","Kansas","kshpgn.los","kshpgn.las"
"D732","NAD83 HARN (USA, NADCON)","E008","Kansas","kshpgn.los","kshpgn.las"
"D733","NAD83 (USA, NADCON)","E008","Kentucky","kyhpgn.los","kyhpgn.las"
"D734","NAD83 HARN (USA, NADCON)","E008","Kentucky","kyhpgn.los","kyhpgn.las"
"D735","NAD83 (USA, NADCON)","E008","Louisiana","lahpgn.los","lahpgn.las"
"D736","NAD83 HARN (USA, NADCON)","E008","Louisiana","lahpgn.los","lahpgn.las"
"D737","NAD83 (USA, NADCON)","E008","Maryland-Delaware","mdhpgn.los","mdhpgn.las"
"D738","NAD83 HARN (USA, NADCON)","E008","Maryland-Delaware","mdhpgn.los","mdhpgn.las"
"D739","NAD83 (USA, NADCON)","E008","Maine","mehpgn.los","mehpgn.las"
"D740","NAD83 HARN (USA, NADCON)","E008","Maine","mehpgn.los","mehpgn.las"
"D741","NAD83 (USA, NADCON)","E008","Michigan","mihpgn.los","mihpgn.las"
"D742","NAD83 HARN (USA, NADCON)","E008","Michigan","mihpgn.los","mihpgn.las"
"D743","NAD83 (USA, NADCON)","E008","Minnesota","mnhpgn.los","mnhpgn.las"
"D744","NAD83 HARN (USA, NADCON)","E008","Minnesota","mnhpgn.los","mnhpgn.las"
"D745","NAD83 (USA, NADCON)","E008","Mississippi","mshpgn.los","mshpgn.las"
"D746","NAD83 HARN (USA, NADCON)","E008","Mississippi","mshpgn.los","mshpgn.las"
"D747","NAD83 (USA, NADCON)","E008","Missouri","mohpgn.los","mohpgn.las"
"D748","NAD83 HARN (USA, NADCON)","E008","Missouri","mohpgn.los","mohpgn.las"
"D749","NAD83 (USA, NADCON)","E008","Nebraska","nbhpgn.los","nbhpgn.las"
"D750","NAD83 HARN (USA, NADCON)","E008","Nebraska","nbhpgn.los","nbhpgn.las"
"D751","NAD83 (USA, NADCON)","E008","Nevada","nvhpgn.los","nvhpgn.las"
"D752","NAD83 HARN (USA, NADCON)","E008","Nevada","nvhpgn.los","nvhpgn.las"
"D753","NAD83 (USA, NADCON)","E008","New England (CT,MA,NH,RI,VT)","nehpgn.los","nehpgn.las"
"D754","NAD83 HARN (USA, NADCON)","E008","New England (CT,MA,NH,RI,VT)","nehpgn.los","nehpgn.las"
"D755","NAD83 (USA, NADCON)","E008","New Jersey","njhpgn.los","njhpgn.las"
"D756","NAD83 HARN (USA, NADCON)","E008","New Jersey","njhpgn.los","njhpgn.las"
"D757","NAD83 (USA, NADCON)","E008","New Mexico","nmhpgn.los","nmhpgn.las"
"D758","NAD83 HARN (USA, NADCON)","E008","New Mexico","nmhpgn.los","nmhpgn.las"
"D759","NAD83 (USA, NADCON)","E008","New York","nyhpgn.los","nyhpgn.las"
"D760","NAD83 HARN (USA, NADCON)","E008","New York","nyhpgn.los","nyhpgn.las"
"D761","NAD83 (USA, NADCON)","E008","North Carolina","nchpgn.los","nchpgn.las"
"D762","NAD83 HARN (USA, NADCON)","E008","North Carolina","nchpgn.los","nchpgn.las"
"D763","NAD83 (USA, NADCON)","E008","North Dakota","ndhpgn.los","ndhpgn.las"
"D764","NAD83 HARN (USA, NADCON)","E008","North Dakota","ndhpgn.los","ndhpgn.las"
"D765","NAD83 (USA, NADCON)","E008","Ohio","ohhpgn.los","ohhpgn.las"
"D766","NAD83 HARN (USA, NADCON)","E008","Ohio","ohhpgn.los","ohhpgn.las"
"D767","NAD83 (USA, NADCON)","E008","Oklahoma","okhpgn.los","okhpgn.las"
"D768","NAD83 HARN (USA, NADCON)","E008","Oklahoma","okhpgn.los","okhpgn.las"
"D769","NAD83 (USA, NADCON)","E008","Pennsylvania","pahpgn.los","pahpgn.las"
"D770","NAD83 HARN (USA, NADCON)","E008","Pennsylvania","pahpgn.los","pahpgn.las"
"D771","NAD83 (USA, NADCON)","E008","Puerto Rico-Virgin Is","pvhpgn.los","pvhpgn.las"
"D772","NAD83 HARN (USA, NADCON)","E008","Puerto Rico-Virgin Is","pvhpgn.los","pvhpgn.las"
"D773","American Samoa 1962 (USA, NADCON)","E000","Samoa (Eastern Islands)","eshpgn.los","eshpgn.las"
"D774","NAD83 HARN (USA, NADCON)","E008","Samoa (Eastern Islands)","eshpgn.los","eshpgn.las"
"D775","American Samoa 1962 (USA, NADCON)","E000","Samoa (Western Islands)","wshpgn.los","wshpgn.las"
"D776","NAD83 HARN (USA, NADCON)","E008","Samoa (Western Islands)","wshpgn.los","wshpgn.las"
"D777","NAD83 (USA, NADCON)","E008","South Carolina","schpgn.los","schpgn.las"
"D778","NAD83 HARN (USA, NADCON)","E008","South Carolina","schpgn.los","schpgn.las"
"D779","NAD83 (USA, NADCON)","E008","South Dakota","sdhpgn.los","sdhpgn.las"
"D780","NAD83 HARN (USA, NADCON)","E008","South Dakota","sdhpgn.los","sdhpgn.las"
"D781","NAD83 (USA, NADCON)","E008","Tennessee","tnhpgn.los","tnhpgn.las"
"D782","NAD83 HARN (USA, NADCON)","E008","Tennessee","tnhpgn.los","tnhpgn.las"
"D783","NAD83 (USA, NADCON)","E008","Texas (East of 100dW)","ethpgn.los","ethpgn.las"
"D784","NAD83 HARN (USA, NADCON)","E008","Texas (East of 100dW)","ethpgn.los","ethpgn.las"
"D785","NAD83 (USA, NADCON)","E008","Texas (West of 100dW)","wthpgn.los","wthpgn.las"
"D786","NAD83 HARN (USA, NADCON)","E008","Texas (West of 100dW)","wthpgn.los","wthpgn.las"
"D787","NAD83 (USA, NADCON)","E008","Utah","uthpgn.los","uthpgn.las"
"D788","NAD83 HARN (USA, NADCON)","E008","Utah","uthpgn.los","uthpgn.las"
"D789","NAD83 (USA, NADCON)","E008","Virginia","vahpgn.los","vahpgn.las"
"D790","NAD83 HARN (USA, NADCON)","E008","Virginia","vahpgn.los","vahpgn.las"
"D791","NAD83 (USA, NADCON)","E008","Washington-Oregon","wohpgn.los","wohpgn.las"
"D792","NAD83 HARN (USA, NADCON)","E008","Washington-Oregon","wohpgn.los","wohpgn.las"
"D793","NAD83 (USA, NADCON)","E008","West Virginia","wvhpgn.los","wvhpgn.las"
"D794","NAD83 HARN (USA, NADCON)","E008","West Virginia","wvhpgn.los","wvhpgn.las"
"D795","NAD83 (USA, NADCON)","E008","Wisconsin","wihpgn.los","wihpgn.las"
"D796","NAD83 HARN (USA, NADCON)","E008","Wisconsin","wihpgn.los","wihpgn.las"
"D797","NAD83 (USA, NADCON)","E008","Wyoming","wyhpgn.los","wyhpgn.las"
"D798","NAD83 HARN (USA, NADCON)","E008","Wyoming","wyhpgn.los","wyhpgn.las"
"D886","Reseau Geodesique Francais 1993","E899",-752,-358,-179,"Taiwan",0,0,0,0,-0.0000011698,0.0000018398,0.0000009822,0.00002329
"D887","Reseau National Belge 1972","E899",-752,-358,-179,"Taiwan",0,0,0,0,-0.0000011698,0.0000018398,0.0000009822,0.00002329
"D701","NAD83 (Deprecated - use D122)","E008",0,0,0,"Alabama",2,2,2,354
"D702","NAD83 HARN (USA, NADCON)","E008","Alabama","D122","alhpgn.los","alhpgn.las"
"D703","NAD83 (Deprecated - use D122)","E008",0,0,0,"Arkansas",2,2,2,354
"D704","NAD83 HARN (USA, NADCON)","E008","Arkansas","D122","arhpgn.los","arhpgn.las"
"D705","NAD83 (Deprecated - use D122)","E008",0,0,0,"Arizona",2,2,2,354
"D706","NAD83 HARN (USA, NADCON)","E008","Arizona","D122","azhpgn.los","azhpgn.las"
"D707","NAD83 (Deprecated - use D122)","E008",0,0,0,"California (North of 37dN)",2,2,2,354
"D708","NAD83 HARN (USA, NADCON)","E008","California (North of 37dN)","D122","cnhpgn.los","cnhpgn.las"
"D709","NAD83 (Deprecated - use D122)","E008",0,0,0,"California (South of 37dN)",2,2,2,354
"D710","NAD83 HARN (USA, NADCON)","E008","California (South of 37dN)","D122","cshpgn.los","cshpgn.las"
"D711","NAD83 (Deprecated - use D122)","E008",0,0,0,"Colorado",2,2,2,354
"D712","NAD83 HARN (USA, NADCON)","E008","Colorado","D122","cohpgn.los","cohpgn.las"
"D713","NAD83 (Deprecated - use D122)","E008",0,0,0,"Florida",2,2,2,354
"D714","NAD83 HARN (USA, NADCON)","E008","Florida","D122","flhpgn.los","flhpgn.las"
"D715","NAD83 (Deprecated - use D122)","E008",0,0,0,"Georgia",2,2,2,354
"D716","NAD83 HARN (USA, NADCON)","E008","Georgia","D122","gahpgn.los","gahpgn.las"
"D717","Guam 1963 (Deprecated - use D068)","E000",-100,-248,259,"Guam",3,3,3,5
"D718","NAD83 HARN (USA, NADCON)","E008","Guam","D068","guhpgn.los","guhpgn.las"
"D719","NAD83 (Deprecated - use D122)","E008",0,0,0,"Hawaii",2,2,2,354
"D720","NAD83 HARN (USA, NADCON)","E008","Hawaii","D122","hihpgn.los","hihpgn.las"
"D721","NAD83 (Deprecated - use D122)","E008",0,0,0,"Idaho-Montana (East of 113dW)",2,2,2,354
"D722","NAD83 HARN (USA, NADCON)","E008","Idaho-Montana (East of 113dW)","D122","emhpgn.los","emhpgn.las"
"D723","NAD83 (Deprecated - use D122)","E008",0,0,0,"Idaho-Montana (West of 113dW)",2,2,2,354
"D724","NAD83 HARN (USA, NADCON)","E008","Idaho-Montana (West of 113dW)","D122","wmhpgn.los","wmhpgn.las"
"D725","NAD83 (Deprecated - use D122)","E008",0,0,0,"Iowa",2,2,2,354
"D726","NAD83 HARN (USA, NADCON)","E008","Iowa","D122","iahpgn.los","iahpgn.las"
"D727","NAD83 (Deprecated - use D122)","E008",0,0,0,"Illinois",2,2,2,354
"D728","NAD83 HARN (USA, NADCON)","E008","Illinois","D122","ilhpgn.los","ilhpgn.las"
"D729","NAD83 (Deprecated - use D122)","E008",0,0,0,"Indiana",2,2,2,354
"D730","NAD83 HARN (USA, NADCON)","E008","Indiana","D122","inhpgn.los","inhpgn.las"
"D731","NAD83 (Deprecated - use D122)","E008",0,0,0,"Kansas",2,2,2,354
"D732","NAD83 HARN (USA, NADCON)","E008","Kansas","D122","kshpgn.los","kshpgn.las"
"D733","NAD83 (Deprecated - use D122)","E008",0,0,0,"Kentucky",2,2,2,354
"D734","NAD83 HARN (USA, NADCON)","E008","Kentucky","D122","kyhpgn.los","kyhpgn.las"
"D735","NAD83 (Deprecated - use D122)","E008",0,0,0,"Louisiana",2,2,2,354
"D736","NAD83 HARN (USA, NADCON)","E008","Louisiana","D122","lahpgn.los","lahpgn.las"
"D737","NAD83 (Deprecated - use D122)","E008",0,0,0,"Maryland-Delaware",2,2,2,354
"D738","NAD83 HARN (USA, NADCON)","E008","Maryland-Delaware","D122","mdhpgn.los","mdhpgn.las"
"D739","NAD83 (Deprecated - use D122)","E008",0,0,0,"Maine",2,2,2,354
"D740","NAD83 HARN (USA, NADCON)","E008","Maine","D122","mehpgn.los","mehpgn.las"
"D741","NAD83 (Deprecated - use D122)","E008",0,0,0,"Michigan",2,2,2,354
"D742","NAD83 HARN (USA, NADCON)","E008","Michigan","D122","mihpgn.los","mihpgn.las"
"D743","NAD83 (Deprecated - use D122)","E008",0,0,0,"Minnesota",2,2,2,354
"D744","NAD83 HARN (USA, NADCON)","E008","Minnesota","D122","mnhpgn.los","mnhpgn.las"
"D745","NAD83 (Deprecated - use D122)","E008",0,0,0,"Mississippi",2,2,2,354
"D746","NAD83 HARN (USA, NADCON)","E008","Mississippi","D122","mshpgn.los","mshpgn.las"
"D747","NAD83 (Deprecated - use D122)","E008",0,0,0,"Missouri",2,2,2,354
"D748","NAD83 HARN (USA, NADCON)","E008","Missouri","D122","mohpgn.los","mohpgn.las"
"D749","NAD83 (Deprecated - use D122)","E008",0,0,0,"Nebraska",2,2,2,354
"D750","NAD83 HARN (USA, NADCON)","E008","Nebraska","D122","nbhpgn.los","nbhpgn.las"
"D751","NAD83 (Deprecated - use D122)","E008",0,0,0,"Nevada",2,2,2,354
"D752","NAD83 HARN (USA, NADCON)","E008","Nevada","D122","nvhpgn.los","nvhpgn.las"
"D753","NAD83 (Deprecated - use D122)","E008",0,0,0,"New England (CT,MA,NH,RI,VT",2,2,2,354
"D754","NAD83 HARN (USA, NADCON)","E008","New England (CT,MA,NH,RI,VT)","D122","nehpgn.los","nehpgn.las"
"D755","NAD83 (Deprecated - use D122)","E008",0,0,0,"New Jersey",2,2,2,354
"D756","NAD83 HARN (USA, NADCON)","E008","New Jersey","D122","njhpgn.los","njhpgn.las"
"D757","NAD83 (Deprecated - use D122)","E008",0,0,0,"New Mexico",2,2,2,354
"D758","NAD83 HARN (USA, NADCON)","E008","New Mexico","D122","nmhpgn.los","nmhpgn.las"
"D759","NAD83 (Deprecated - use D122)","E008",0,0,0,"New York",2,2,2,354
"D760","NAD83 HARN (USA, NADCON)","E008","New York","D122","nyhpgn.los","nyhpgn.las"
"D761","NAD83 (Deprecated - use D122)","E008",0,0,0,"North Carolina",2,2,2,354
"D762","NAD83 HARN (USA, NADCON)","E008","North Carolina","D122","nchpgn.los","nchpgn.las"
"D763","NAD83 (Deprecated - use D122)","E008",0,0,0,"North Dakota",2,2,2,354
"D764","NAD83 HARN (USA, NADCON)","E008","North Dakota","D122","ndhpgn.los","ndhpgn.las"
"D765","NAD83 (Deprecated - use D122)","E008",0,0,0,"Ohio",2,2,2,354
"D766","NAD83 HARN (USA, NADCON)","E008","Ohio","D122","ohhpgn.los","ohhpgn.las"
"D767","NAD83 (Deprecated - use D122)","E008",0,0,0,"Oklahoma",2,2,2,354
"D768","NAD83 HARN (USA, NADCON)","E008","Oklahoma","D122","okhpgn.los","okhpgn.las"
"D769","NAD83 (Deprecated - use D122)","E008",0,0,0,"Pennsylvania",2,2,2,354
"D770","NAD83 HARN (USA, NADCON)","E008","Pennsylvania","D122","pahpgn.los","pahpgn.las"
"D771","NAD83 (Deprecated - use D122)","E008",0,0,0,"Puerto Rico-Virgin Is",2,2,2,354
"D772","NAD83 HARN (USA, NADCON)","E008","Puerto Rico-Virgin Is","D122","pvhpgn.los","pvhpgn.las"
"D773","American Samoa 1962 (Deprecated - use D189)","E000",-115,118,426,"Samoa (Eastern Islands)",25,25,25,2
"D774","NAD83 HARN (USA, NADCON)","E008","Samoa (Eastern Islands)","D189","eshpgn.los","eshpgn.las"
"D775","American Samoa 1962 (Deprecated - use D189)","E000",-115,118,426,"Samoa (Western Islands)",25,25,25,2
"D776","NAD83 HARN (USA, NADCON)","E008","Samoa (Western Islands)","D189","wshpgn.los","wshpgn.las"
"D777","NAD83 (Deprecated - use D122)","E008",0,0,0,"South Carolina",2,2,2,354
"D778","NAD83 HARN (USA, NADCON)","E008","South Carolina","D122","schpgn.los","schpgn.las"
"D779","NAD83 (Deprecated - use D122)","E008",0,0,0,"South Dakota",2,2,2,354
"D780","NAD83 HARN (USA, NADCON)","E008","South Dakota","D122","sdhpgn.los","sdhpgn.las"
"D781","NAD83 (Deprecated - use D122)","E008",0,0,0,"Tennessee",2,2,2,354
"D782","NAD83 HARN (USA, NADCON)","E008","Tennessee","D122","tnhpgn.los","tnhpgn.las"
"D783","NAD83 (Deprecated - use D122)","E008",0,0,0,"Texas (East of 100dW)",2,2,2,354
"D784","NAD83 HARN (USA, NADCON)","E008","Texas (East of 100dW)","D122","ethpgn.los","ethpgn.las"
"D785","NAD83 (Deprecated - use D122)","E008",0,0,0,"Texas (West of 100dW)",2,2,2,354
"D786","NAD83 HARN (USA, NADCON)","E008","Texas (West of 100dW)","D122","wthpgn.los","wthpgn.las"
"D787","NAD83 (Deprecated - use D122)","E008",0,0,0,"Utah",2,2,2,354
"D788","NAD83 HARN (USA, NADCON)","E008","Utah","D122","uthpgn.los","uthpgn.las"
"D789","NAD83 (Deprecated - use D122)","E008",0,0,0,"Virginia",2,2,2,354
"D790","NAD83 HARN (USA, NADCON)","E008","Virginia","D122","vahpgn.los","vahpgn.las"
"D791","NAD83 (Deprecated - use D122)","E008",0,0,0,"Washington-Oregon",2,2,2,354
"D792","NAD83 HARN (USA, NADCON)","E008","Washington-Oregon","D122","wohpgn.los","wohpgn.las"
"D793","NAD83 (Deprecated - use D122)","E008",0,0,0,"West Virginia",2,2,2,354
"D794","NAD83 HARN (USA, NADCON)","E008","West Virginia","D122","wvhpgn.los","wvhpgn.las"
"D795","NAD83 (Deprecated - use D122)","E008",0,0,0,"Wisconsin",2,2,2,354
"D796","NAD83 HARN (USA, NADCON)","E008","Wisconsin","D122","wihpgn.los","wihpgn.las"
"D797","NAD83 (Deprecated - use D122)","E008",0,0,0,"Wyoming",2,2,2,354
"D798","NAD83 HARN (USA, NADCON)","E008","Wyoming","D122","wyhpgn.los","wyhpgn.las"
"D888","Lebanon Stereographic","E012",154.2668777,107.2190767,-263.01161212,"Lebanon",0,0,0,0,0.310716,0.218736,0.191232,0.99999913
"D889","Lebanon Lambert","E202",190.9999,133.32473,-232.8391,"Lebanon",0,0,0,0,0.307836,0.216756,0.189036,0.9995341
"D890","Luxembourg (LUREF)","E004",-192.986,13.673,-39.309,"Luxembourg",0,0,0,0,0.409900,2.933200,-2.688100,1.00000043
"D891","Datum 73","E004",-223.237,110.193,36.649,"Portugal",0,0,0,0
"D892","Datum Lisboa","E004",-304.046,-60.576,103.640,"Portugal",0,0,0,0
"D893","PDO Survey Datum 1993","E001",-180.624,-225.516,173.919,"Oman",0,0,0,0,0.80970,1.89755,-8.33604,16.71006
"D894","WGS 1984 semi-major","E020",0,0,0,"WGS 1984 Auxiliary Sphere semi-major axis",0,0,0,0
"D898","TWD97","E008",0,0,0,"Taiwan",0,0,0,0,0.0,0.0,0.0,0.0
"D899","TWD67","E899",-752,-358,-179,"Taiwan",0,0,0,0,-0.0000011698,0.0000018398,0.0000009822,0.00002329
"D886","Reseau Geodesique Francais 1993","E899",-752,-358,-179,"France",0,0,0,0,-0.0000011698,0.0000018398,0.0000009822,0.00002329
"D887","Reseau National Belge 1972","E899",-752,-358,-179,"Belgium",0,0,0,0,-0.0000011698,0.0000018398,0.0000009822,0.00002329
"D819","Xian 1980","E224",0,0,0,"China",0,0,0,0,0,0,0,0
"D820","Korea 2000","E008",0.0,0.0,0.0,"South Korea",0,0,0,0
"D821","Pulkovo 1995","E015",24.47,-130.89,-81.56,"Russian Federation",0,0,0,0,0,0,-0.13,-0.22
"D822","Beijing 1954","E015",15.8,-154.4,-82.3,"China",0,0,0,0
"D823","Stockholm 1938 (RT38)","E002",0.0,0.0,0.0,"Sweden",0,0,0,0
"D824","Greenland 1996 (GR96)","E008",0.0,0.0,0.0,"Greenland",0,0,0,0
"D825","Libyan Geodetic Datum 2006 (LGD2006)","E004",-208.406,-109.878,-2.5764,"Libya",0,0,0,0
"D826","Reseau Geodesique de la Polynesie Francaise (RGPF)","E008",0.072,-0.507,-0.245,"French Polynesia",0,0,0,0,0.0183,-0.0003,0.007,-0.0093
"D827","IGC 1962 6th Parallel South","E001",0.0,0.0,0.0,"Democratic Republic of the Congo - adjacent to 6th parallel south",0,0,0,0
"D828","Geodetic Datum of Malaysia (GDM)","E008",0.0,0.0,0.0,"Malaysia",0,0,0,0
"D829","New Beijing","E015",0.0,0.0,0.0,"China",0,0,0,0
"D830","Turkish National Reference Frame (TUKREF)","E008",0.0,0.0,0.0,"Turkey",0,0,0,0
"D831","Bhutan National Geodetic Datum (DRUKREF)","E008",0.0,0.0,0.0,"Bhutan",0,0,0,0
"D832","Ukraine 2000","E015",0.0,0.0,0.0,"Ukraine",0,0,0,0
"D833","Japanese Geodetic Datum 2011 (JGD2011)","E008",0.0,0.0,0.0,"Japan",0,0,0,0
"D834","Posiciones Geodesicas Argentinas 1998 (POSGAR 98)","E008",0.0,0.0,0.0,"Argentina",0,0,0,0
"D835","Posiciones Geodesicas Argentinas 1994 (POSGAR 94)","E012",0.0,0.0,0.0,"Argentina",0,0,0,0
"D836","Posiciones Geodesicas Argentinas 2007 (POSGAR 07)","E008",0.0,0.0,0.0,"Argentina",0,0,0,0
"D837","Datum Geodesi Nasional 1995 (DGN95)","E012",0.0,0.0,0.0,"Indonesia",0,0,0,0
"D838","Korea 1995","E012",0.0,0.0,0.0,"South Korea",0,0,0,0
"D839","Institut Geographique du Congo Belge (IGCB) 1955","E001",-79.9,-158,-168.9,"The Democratic Republic of the Congo (Zaire) - Lower Congo",0,0,0,0
"D894","WGS 1984 semi-major","E020",0,0,0,"WGS 1984 Auxiliary Sphere semi-major axis",0,0,0,0
"D895","ATS77","E910",-95.323,166.098,-69.942,"Maritime Provinces",0,0,0,0,0.215,1.031,-0.047,1.922
"D896","GosatCAIL1B+ EarthRadius","E025",0,0,0,"GosatCAIL1B+ EarthRadius",0,0,0,0
"D897","Myanmar","E227",247,785,277,"Myanmar",0,0,0,0
"D900","China 2000","E231",0,0,0,"China 2000",0,0,0,0
"D901","Nouvelle Triangulation Francaise (grid shift)","E202","France","-D350","ntf_r93.gsb"
"D902","PRS92","E000",-127.62153,-67.24339,-47.04738,"Philippines Reference System 1992",0,0,0,0,3.06803,-4.90297,-1.57807,-1.06002
"D903","North American 1983 2011","E008",0,0,0,"Alaska, Canada, CONUS, Central America, Mexico",2,2,2,354

View File

@@ -1,57 +1,69 @@
!
! By email on December 2nd, 2010:
! From https://github.com/OSGeo/gdal/issues/8034, June 30, 2023
!
! I, Louis Burry, on behalf of PCI Geomatics agree to allow the ellips.txt
! I, Michael Goldberg, on behalf of PCI Geomatics agree to allow the ellips.txt
! and datum.txt file to be distributed under the GDAL open source license.
!
! Louis Burry
! VP Technology & Delivery
! Michael Goldberg
! Development Manager
! PCI Geomatics
!
! PCI Ellipsoid Database
! ----------------------
! This file lists the different reference ellipsoids that may
! be used by PCI coordinate systems. Ellipsoid entries in datum.txt
! refer to entries in this file.
!
! Each ellipsoid is listed on a single line. The format of each record
! is as follows:
!
! Ellipsoid_code, Description_string, Semimajor_axis_m, Semiminor_axis_m [,extra comments]
!
! Ellipsoid_code is the code that uniquely identifies the ellipsoid
! within PCI software
! Description_string is a short description that helps users to identify
! the ellipsoid. It may be listed, for example, in a dropdown list in
! a PCI dialog box.
! Semimajor_axis_m is the ellipsoid semi-major (equatorial) axis length in metres.
! Semiminor_axis_m is the ellipsoid semi-minor (polar) axis length in metres.
!
! Any extra fields may be added after these four elements if desired; they will
! not be read by PCI software but may be helpful for the user.
!
! NOTE: The range of "E908" to "E998" is set aside for
! the use of local customer development.
!
"E009","Airy 1830",6377563.396,6356256.91
"E011","Modified Airy",6377340.189,6356034.448
"E910","ATS77",6378135.0,6356750.304922
"E014","Australian National 1965",6378160.,6356774.719
"E002","Bessel 1841",6377397.155,6356078.96284
"E900","Bessel 1841 (Namibia)",6377483.865,6356165.382966
"E333","Bessel 1841 (Japan By Law)",6377397.155,6356078.963
"E000","Clarke 1866",6378206.4,6356583.8
"E001","Clarke 1880 (RGS)",6378249.145,6356514.86955
"E202","Clarke 1880 (IGN, France)",6378249.2,6356515.0
"E006","Everest (India 1830)",6377276.3452,6356075.4133
"E010","Everest (W. Malaysia and Singapore 1948)",6377304.063,6356103.039
"E901","Everest (India 1956)",6377301.243,6356100.228368
"E902","Everest (W. Malaysia 1969)",6377295.664,6356094.667915
"E903","Everest (E. Malaysia and Brunei)",6377298.556,6356097.550301
"E201","Everest (Pakistan)",6377309.613,6356108.570542
"E017","Fischer 1960",6378166.,6356784.283666
"E013","Modified Fischer 1960",6378155.,6356773.3205
"E018","Fischer 1968",6378150.,6356768.337303
"E008","GRS 1980",6378137.,6356752.31414
"E904","Helmert 1906",6378200.,6356818.169628
"E016","Hough 1960",6378270.,6356794.343479
"E200","Indonesian 1974",6378160.,6356774.504086
"E004","International 1924",6378388.,6356911.94613
"E203","IUGG 67",6378160.,6356774.516090714
"E015","Krassovsky 1940",6378245.,6356863.0188
"E700","MODIS (Sphere from WGS84)",6371007.181,6371007.181
"E002","Bessel 1841",6377397.155,6356078.96284
"E003","New International 1967",6378157.5,6356772.2
"E019","Normal Sphere",6370997.,6370997.
"E905","SGS 85",6378136.,6356751.301569
"E907","South American 1969",6378160.,6356774.719
"E906","WGS 60",6378165.,6356783.286959
"E007","WGS 66",6378145.,6356759.769356
"E004","International 1924",6378388.,6356911.94613
"E005","WGS 72",6378135.,6356750.519915
"E006","Everest (India 1830)",6377276.3452,6356075.4133
"E007","WGS 66",6378145.,6356759.769356
"E008","GRS 1980",6378137.,6356752.31414
"E009","Airy 1830",6377563.396,6356256.91
"E010","Everest (W. Malaysia and Singapore 1948)",6377304.063,6356103.039
"E011","Modified Airy",6377340.189,6356034.448
"E012","WGS 84",6378137.,6356752.314245
"E013","Modified Fischer 1960",6378155.,6356773.3205
"E014","Australian National 1965",6378160.,6356774.719
"E015","Krassovsky 1940",6378245.,6356863.0188
"E016","Hough 1960",6378270.,6356794.343479
"E017","Fischer 1960",6378166.,6356784.283666
"E018","Fischer 1968",6378150.,6356768.337303
"E019","Normal Sphere",6370997.,6370997.
"E020","WGS 84 semimajor axis",6378137.,6378137.
"E600","D-PAF (Orbits)",6378144.0,6356759.0
"E601","Test Data Set 1",6378144.0,6356759.0
"E602","Test Data Set 2",6377397.2,6356079.0
"E204","War Office",6378300.583,6356752.270
"E021","WGS 84 semiminor axis",6356752.314245,6356752.314245
"E022","Clarke 1866 Authalic Sphere", 6370997.000000, 6370997.000000
"E023","GRS 1980 Authalic Sphere", 6371007.000000, 6371007.000000
"E024","International 1924 Authalic Sphere", 6371228.000000, 6371228.000000
"E025","GosatCAIL1B+ EarthRadius",6371008.77138,6371008.77138
"E200","Indonesian 1974",6378160.,6356774.504086
"E201","Everest (Pakistan)",6377309.613,6356108.570542
"E202","Clarke 1880 (IGN, France)",6378249.2,6356515.0
"E203","IUGG 67",6378160.,6356774.516090714
"E204","War Office",6378300.000,6356751.689189
"E205","Clarke 1880 Arc",6378249.145,6356514.966
"E206","Bessel Modified",6377492.018,6356173.5087
"E207","Clarke 1858",6378293.639,6356617.98149
@@ -61,17 +73,57 @@
"E211","Everest Modified",6377304.063,6356103.039
"E212","Modified Everest 1969",6377295.664,6356094.668
"E213","Everest (1967 Definition)",6377298.556,6356097.550
"E214","Clarke 1880 (Benoit)",6378300.79,6356566.43
"E214","Clarke 1880 (Benoit)",6378300.789000,6356566.435000
"E215","Clarke 1880 (SGA)",6378249.2,6356515.0
"E216","Everest (1975 Definition)",6377299.151,6356098.1451
"E217","GEM 10C",6378137,6356752.31414
"E218","OSU 86F",6378136.2,6356751.516672
"E219","OSU 91A",6378136.3,6356751.6163367
"E220","Sphere",6371000,6371000
"E221","Struve 1860",6378297,6356655.847
"E221","Struve 1860",6378298.300000,6356657.142670
"E222","Walbeck",6376896,6355834.847
"E223","Plessis 1817",6376523,6355862.933
"E224","Xian 1980",6378140.0,6356755.288
"E225","EMEP Sphere",6370000,6370000
"E226","Everest (India and Nepal)",6377301.243,6356100.228368
"E227","Everest (1830 Definition)", 6377299.365595, 6356098.359005,"EPSG:7042"
"E228","Danish 1876", 6377019.270000, 6355762.539100
"E229","Bessel Namibia (GLM)", 6377483.865280, 6356165.383246
"E230","PZ-90", 6378136.000000, 6356751.361746
"E231","CGCS2000", 6378137.000000, 6356752.314140
"E232","IAG 1975", 6378140.000000, 6356755.288158
"E233","NWL 9D", 6378145.000000, 6356759.769489
"E234","Hughes 1980", 6378273.000000, 6356889.449000
"E235","Clarke 1880 (international foot)", 6378306.369600, 6356571.996000
"E236","Clarke 1866 Michigan", 6378450.047549, 6356826.621488
"E237","APL 4.5 (1968)", 6378144.000000, 6356757.338698
"E238","Airy (War Office)", 6377542.178, 6356235.764
"E239","Clarke 1858 (DIGEST)", 6378235.600, 6356560.140
"E240","Clarke 1880 (Palestine)", 6378300.782, 6356566.427
"E241","Clarke 1880 (Syria)", 6378247.842, 6356513.671
"E242","Clarke 1880 (Fiji)", 6378301.000, 6356566.548
"E243","Andrae", 6377104.430, 6355847.415
"E244","Delambre 1810", 6376985.228, 6356323.664
"E245","Delambre (Carte de France)", 6376985.000, 6356323.436
"E246","Germaine (Djibouti)", 6378284.000, 6356589.156
"E247","Hayford 1909", 6378388.000, 6356909.000
"E248","Krayenhoff 1827", 6376950.400, 6356356.341
"E249","Plessis Reconstituted", 6376523.994, 6355862.907
"E250","GRS 1967", 6378160.000, 6356774.516
"E251","Svanberg", 6376797.000, 6355837.971
"E252","Walbeck 1819 (Planheft 1942)", 6376895.000, 6355834.000
"E333","Bessel 1841 (Japan By Law)",6377397.155,6356078.963
"E600","D-PAF (Orbits)",6378144.0,6356759.0
"E601","Test Data Set 1",6378144.0,6356759.0
"E602","Test Data Set 2",6377397.2,6356079.0
"E700","MODIS (Sphere from WGS84)",6371007.181,6371007.181
"E899","GRS 1967 Modified",6378160.,6356774.719195306
"E900","Bessel 1841 (Namibia)",6377483.865,6356165.382966
"E901","Everest (India 1956)",6377301.243,6356100.228368
"E902","Everest (W. Malaysia 1969)",6377295.664,6356094.667915
"E903","Everest (E. Malaysia and Brunei)",6377298.556,6356097.550301
"E904","Helmert 1906",6378200.,6356818.169628
"E905","SGS 85",6378136.,6356751.301569
"E906","WGS 60",6378165.,6356783.286959
"E907","South American 1969",6378160.,6356774.719
"E910","ATS77",6378135.0,6356750.304922

View File

@@ -1,28 +0,0 @@
<mapml>
<head>
<title>states</title>
<base href="${URL}" />
<meta charset="utf-8" />
<meta content="text/mapml;projection=${TILING_SCHEME}" http-equiv="Content-Type" />
<!--
<link href="http://localhost:8080/myservice/mapml/WGS84?style=" rel="alternate" projection="WGS84" />
<link href="http://localhost:8080/myservice/mapml/OSMTILE?style=" rel="alternate" projection="OSMTILE" />
<link href="http://localhost:8080/myservice/mapml/CBMTILE?style=" rel="alternate" projection="CBMTILE" />
<link href="http://localhost:8080/myservice/mapml/APSTILE?style=" rel="alternate" projection="APSTILE" />
-->
</head>
<body>
<extent units="${TILING_SCHEME}">
<input name="z" type="zoom" value="${CURZOOM}" min="${MINZOOM}" max="${MAXZOOM}" />
<input name="x" type="location" axis="column" units="tilematrix" min="${MINTILEX}" max="${MAXTILEX}" />
<input name="y" type="location" axis="row" units="tilematrix" min="${MINTILEY}" max="${MAXTILEY}" />
<link tref="${URL}{z}/{x}/{y}.${TILEEXT}" rel="tile" />
<!--<link tref="http://localhost:8080/myservice/wmts?layer=MYLAYER&amp;style=&amp;tilematrixset=${TILING_SCHEME}&amp;service=WMTS&amp;request=GetTile&amp;version=1.0.0&amp;tilematrix={z}&amp;TileCol={x}&amp;TileRow={y}&amp;format=image/png" rel="tile" />-->
<!--
<input name="i" type="location" axis="i" units="tile" />
<input name="j" type="location" axis="j" units="tile" />
<link tref="http://localhost:8080/myservice/wmts/?LAYER=MYLAYER&amp;TILEMATRIX={z}&amp;TileCol={x}&amp;TileRow={y}&amp;TILEMATRIXSET=WGS84&amp;SERVICE=WMTS&amp;VERSION=1.0.0&amp;REQUEST=GetFeatureInfo&amp;FEATURE_COUNT=50&amp;FORMAT=image/png&amp;STYLE=&amp;INFOFORMAT=text/mapml&amp;I={i}&amp;J={j}" rel="query" />
-->
</extent>
</body>
</mapml>

View File

@@ -3,13 +3,13 @@
"spdxVersion": "SPDX-2.2",
"dataLicense": "CC0-1.0",
"SPDXID": "SPDXRef-DOCUMENT",
"documentNamespace": "https://spdx.org/spdxdocs/gdal-x64-linux-dynamic-3.7.2-fd4cb879-6dde-45a2-9a33-51a5568717bc",
"name": "gdal:x64-linux-dynamic@3.7.2 d752f0ae62cbfd7d85fb923d58f88ea3dff3d13eb45f8b3b3b7590b841a97f64",
"documentNamespace": "https://spdx.org/spdxdocs/gdal-x64-linux-dynamic-release-3.9.1-b1c6bf63-bc86-4c2d-b1b0-814c8558dd6b",
"name": "gdal:x64-linux-dynamic-release@3.9.1 e8268937ca64c8e9b62f162b1b1582b4046b026c9768bde14b60a485a2ae04d0",
"creationInfo": {
"creators": [
"Tool: vcpkg-ac02a9f660977426b8ec6392919fbb1d51b10998"
"Tool: vcpkg-2024-06-10-02590c430e4ed9215d27870138c2e579cc338772"
],
"created": "2023-10-25T19:08:40Z"
"created": "2024-09-28T15:35:11Z"
},
"relationships": [
{
@@ -117,8 +117,8 @@
{
"name": "gdal",
"SPDXID": "SPDXRef-port",
"versionInfo": "3.7.2",
"downloadLocation": "git+https://github.com/Microsoft/vcpkg@d01864aaa21a85e1e8f7bb6748d607e953c52e77",
"versionInfo": "3.9.1",
"downloadLocation": "git+https://github.com/Microsoft/vcpkg@65b271a32a93a9dfa4dfd3905fcd0b5388926f81",
"homepage": "https://gdal.org",
"licenseConcluded": "NOASSERTION",
"licenseDeclared": "NOASSERTION",
@@ -127,9 +127,9 @@
"comment": "This is the port (recipe) consumed by vcpkg."
},
{
"name": "gdal:x64-linux-dynamic",
"name": "gdal:x64-linux-dynamic-release",
"SPDXID": "SPDXRef-binary",
"versionInfo": "d752f0ae62cbfd7d85fb923d58f88ea3dff3d13eb45f8b3b3b7590b841a97f64",
"versionInfo": "e8268937ca64c8e9b62f162b1b1582b4046b026c9768bde14b60a485a2ae04d0",
"downloadLocation": "NONE",
"licenseConcluded": "NOASSERTION",
"licenseDeclared": "NOASSERTION",
@@ -139,70 +139,34 @@
{
"SPDXID": "SPDXRef-resource-1",
"name": "OSGeo/gdal",
"downloadLocation": "git+https://github.com/OSGeo/gdal@v3.7.2",
"downloadLocation": "git+https://github.com/OSGeo/gdal@v3.9.1",
"licenseConcluded": "NOASSERTION",
"licenseDeclared": "NOASSERTION",
"copyrightText": "NOASSERTION",
"checksums": [
{
"algorithm": "SHA512",
"checksumValue": "95b0dee07a616c8fb26ded2c538a6933ba070c0567e88af9356daea9b1df6c910edb4fcf55766839c1873829d20948b1714b3e2285e5ac57de8fcf0970ff53ff"
"checksumValue": "d9ab5d94dc870df17b010166d3ebbe897a1f673ba05bf31cd4bed437b6db303dd9e373ba5099d3a191ff3e48c995556fb5bcc77d03d975614df4aa20a2c2b085"
}
]
}
],
"files": [
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/gdal/d01864aaa21a85e1e8f7bb6748d607e953c52e77/libkml.patch",
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/gdal/65b271a32a93a9dfa4dfd3905fcd0b5388926f81/cmake-project-include.cmake",
"SPDXID": "SPDXRef-file-0",
"checksums": [
{
"algorithm": "SHA256",
"checksumValue": "fe888df8a7c9e468cdd87640c025f48f165d5264af1fa20604bd60859e6f000f"
"checksumValue": "60c0f79155c78ec0ec4ccdc77e00f4613ae4630c6697f51f884bf8f979a48593"
}
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/gdal/d01864aaa21a85e1e8f7bb6748d607e953c52e77/fix-jpeg.patch",
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/gdal/65b271a32a93a9dfa4dfd3905fcd0b5388926f81/usage",
"SPDXID": "SPDXRef-file-1",
"checksums": [
{
"algorithm": "SHA256",
"checksumValue": "28a04f01c92e6b99a5e918f38333c8245089a6ba2f67191e9fbc54288cfa941e"
}
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/gdal/d01864aaa21a85e1e8f7bb6748d607e953c52e77/vcpkg.json",
"SPDXID": "SPDXRef-file-2",
"checksums": [
{
"algorithm": "SHA256",
"checksumValue": "2a27cd0721793c5e2a4e66a834f3cf25f76ffafc178e1d6d703fba9e78139bd4"
}
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/gdal/d01864aaa21a85e1e8f7bb6748d607e953c52e77/cmake-project-include.cmake",
"SPDXID": "SPDXRef-file-3",
"checksums": [
{
"algorithm": "SHA256",
"checksumValue": "71c0138c71e6286cf8391e562775d853d3a29f5d58597ecc1b47a8bfb8b6412c"
}
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/gdal/d01864aaa21a85e1e8f7bb6748d607e953c52e77/usage",
"SPDXID": "SPDXRef-file-4",
"checksums": [
{
"algorithm": "SHA256",
@@ -213,44 +177,68 @@
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/gdal/d01864aaa21a85e1e8f7bb6748d607e953c52e77/vcpkg-cmake-wrapper.cmake",
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/gdal/65b271a32a93a9dfa4dfd3905fcd0b5388926f81/portfile.cmake",
"SPDXID": "SPDXRef-file-2",
"checksums": [
{
"algorithm": "SHA256",
"checksumValue": "92a0c437215b5fed404a2deb00ade3a8cfb4da00c518af38525a3194a7d89c1a"
}
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/gdal/65b271a32a93a9dfa4dfd3905fcd0b5388926f81/libkml.patch",
"SPDXID": "SPDXRef-file-3",
"checksums": [
{
"algorithm": "SHA256",
"checksumValue": "fe888df8a7c9e468cdd87640c025f48f165d5264af1fa20604bd60859e6f000f"
}
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/gdal/65b271a32a93a9dfa4dfd3905fcd0b5388926f81/vcpkg.json",
"SPDXID": "SPDXRef-file-4",
"checksums": [
{
"algorithm": "SHA256",
"checksumValue": "71bb644d1501bdc16fbe5e88a45a27c478bee667cbace588c569de28778c89ff"
}
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/gdal/65b271a32a93a9dfa4dfd3905fcd0b5388926f81/find-link-libraries.patch",
"SPDXID": "SPDXRef-file-5",
"checksums": [
{
"algorithm": "SHA256",
"checksumValue": "c507eaa077072e9877607fd5f70381eebf19900661e2e1fd099d84c4df1b8c24"
"checksumValue": "043cbdd6298fce33c29d128241470b71990dc13eb63bfa44b3d82b17f5384468"
}
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/gdal/d01864aaa21a85e1e8f7bb6748d607e953c52e77/portfile.cmake",
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/gdal/65b271a32a93a9dfa4dfd3905fcd0b5388926f81/target-is-valid.patch",
"SPDXID": "SPDXRef-file-6",
"checksums": [
{
"algorithm": "SHA256",
"checksumValue": "47aff7e31c70f25ca782b23c4e93004c989cd346d0d18ea9737545283779573f"
"checksumValue": "6a369356c57860f97bd756d3604e7219774e2bfe5c74e5e0178496fad253900f"
}
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/gdal/d01864aaa21a85e1e8f7bb6748d607e953c52e77/find-link-libraries.patch",
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/gdal/65b271a32a93a9dfa4dfd3905fcd0b5388926f81/fix-gdal-target-interfaces.patch",
"SPDXID": "SPDXRef-file-7",
"checksums": [
{
"algorithm": "SHA256",
"checksumValue": "9e390300ab79cf6fef1bb009416346a1f98d4946cb14c71ee4ea46217e3cb9d2"
}
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/gdal/d01864aaa21a85e1e8f7bb6748d607e953c52e77/fix-gdal-target-interfaces.patch",
"SPDXID": "SPDXRef-file-8",
"checksums": [
{
"algorithm": "SHA256",
@@ -259,6 +247,18 @@
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/gdal/65b271a32a93a9dfa4dfd3905fcd0b5388926f81/vcpkg-cmake-wrapper.cmake",
"SPDXID": "SPDXRef-file-8",
"checksums": [
{
"algorithm": "SHA256",
"checksumValue": "c507eaa077072e9877607fd5f70381eebf19900661e2e1fd099d84c4df1b8c24"
}
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
}
]
}

View File

@@ -1,40 +1,41 @@
cmake 3.27.5
cmake-project-include.cmake 71c0138c71e6286cf8391e562775d853d3a29f5d58597ecc1b47a8bfb8b6412c
curl 5a630f18174e3f39ad6a88df7c3b93b2270527ea4b19fc579305505d17b83995
expat 228e0fb7f10543ebd712496b6753032264bca5578f884828afc127fab3390dda
features core;curl;expat;geos;jpeg;lerc;png;qhull;recommended-features;sqlite3
find-link-libraries.patch 9e390300ab79cf6fef1bb009416346a1f98d4946cb14c71ee4ea46217e3cb9d2
cmake 3.30.2
cmake-project-include.cmake 60c0f79155c78ec0ec4ccdc77e00f4613ae4630c6697f51f884bf8f979a48593
curl 4e47bc89fe251262d304424d5c2aeb86c9a8f6c51a1d681c442b1a0db952b8a4
expat 815990947db743a709c88ac9ccf40e997a4087a99e4011e4dd34444c4495fc81
features core;curl;expat;geos;iconv;jpeg;lerc;png;qhull;recommended-features;sqlite3
find-link-libraries.patch 043cbdd6298fce33c29d128241470b71990dc13eb63bfa44b3d82b17f5384468
fix-gdal-target-interfaces.patch 9dbe9d5e0dbc3a21370bb3560aa32811d10cc0b19e4b6833cd7258699d206215
fix-jpeg.patch 28a04f01c92e6b99a5e918f38333c8245089a6ba2f67191e9fbc54288cfa941e
geos 920fd3a55a353e2d9d88cb0618bd0c36a8267c0ebfc07040d23936f6c5c547aa
json-c f3d5a2cb0c9fc5e3943d79b3df488062d4d4f4d08848112981dccd98af07baf1
lerc ecbbf653c8f0c796f0e18f83b37c3a00214094f3b1e62f49811f25acae70b45c
libgeotiff b1d6e140841757188fe532584a6715de807b842b54ffdd36387db879ad121d0c
libjpeg-turbo a5587f913fdc8c6b5ac389f48cc735eb1882a56795a0660c88aeafbdfdb8a2f7
geos 61c16e5774041fdf25de7838da49c82db9de6ffdc3c5a1e182b4dbed7748fabd
json-c 9375368151ef1286f2fd07b33caa9d471bb5abc0a4549fd003a6812b127283a9
lerc 80a8808cf75be908d9cd81fa00a1d62fdcc92429f854ea53370ba3f5ed4a935f
libgeotiff 9721aa3a8b06025a3313d5ad64bc0cf95fe8346e785af55acb89a41c8051de8f
libiconv 8eecbe3acd145f92af4ca65174d4aedab696257f04f1c4938b8ab33672bce8ca
libjpeg-turbo a09c75aa61f38fa0fad85aaccd7cdfdbf37b06fab24c1b6b7b3ade75f0fe688d
libkml.patch fe888df8a7c9e468cdd87640c025f48f165d5264af1fa20604bd60859e6f000f
libpng 0fa3ac702656db863ab18f963a80ca498ad751ef376a3aa3c708468165bb3e24
pkgconf ed402270d63bd046d62d402f30cdb50abc6c646b796a474d8dc44f0bac1d7e7e
portfile.cmake 47aff7e31c70f25ca782b23c4e93004c989cd346d0d18ea9737545283779573f
ports.cmake 5a8e00cedff0c898b1f90f7d129329d0288801bc9056562b039698caf31ff3f3
libpng b2bee40b469cacf950388bfc93cf6b17ed6abfc8399eeec44934d212e3b4ffbc
pkgconf cd4b3b9f880b4bb762f4d4c665166831469e8c35e12fc7b6b20efd4706267920
portfile.cmake 92a0c437215b5fed404a2deb00ade3a8cfb4da00c518af38525a3194a7d89c1a
ports.cmake 3581688c16865b2026d28240d7c6257b848475ffc9fe81b3f3d304973253cf5e
post_build_checks 2
proj 790c2ab7942a33c34846f162796b4ea27574760fbde47656105c33d7f285ebbd
qhull f3c02cfbdd9202c9c9354ec017b09596818e230d68a1a4a1f8d439fe3032a37f
sqlite3 f86216823174c16c16ed1d33c1038759b2027189388633132d575e70d97656c8
tiff c271063333073276f88917c961b3c1e15d42746004d329004ffcb4a0dc3e5dae
triplet x64-linux-dynamic
triplet_abi 96ce2ff9564a8b4b9a180f0ffb82b6584a706982ae668e328ffc092ba334721f-b3b2cd43408edc7b2c5525859d9730edbfba2e4068956c6607a21d0abdaef072-ca0d0c5903d7f43fb3dd8f702ecaa82569b715ec
proj 7a1ad719fa19418b0ccf3e6bb837ab34ed4fcc99911f810a598dfc0a2960b42c
qhull f68e9ff8accf8dcf841dd957a4c04c9272f5d722a065c6620c4b94258e7f3d9e
sqlite3 47c85d0bed92146a407dcc0edb61d0f4f8a20556dd62a6206bf5d42ced880e30
target-is-valid.patch 6a369356c57860f97bd756d3604e7219774e2bfe5c74e5e0178496fad253900f
tiff 34d0c3db5c2929ab6f7b2a999fc538b38e2836eeb877278f2f017fee015eb5c3
triplet x64-linux-dynamic-release
triplet_abi 96ce2ff9564a8b4b9a180f0ffb82b6584a706982ae668e328ffc092ba334721f-8f20f723a25e9787363b95fcfd14b63d82b71ac3e295a73c97d09e681decb90c-7fb5c0e06a13ccc6d1603dc7d10780c1a92b2b78
usage c85584261e2011a94b86f04c3a28dd2e165c9e6b47677a9bee26a3d387bc05f2
vcpkg-cmake a433c5e70a35b07c67b910e7a8b1c55f3863c3927c2e874d56544673dfb5774f
vcpkg-cmake-config fe2f5adbb708be2b9a7caa1e73549d92dd41b68173ed2f187aad834b217b4491
vcpkg-cmake c66dd076f3f67e579e976f89b74e524cdedfc182bb5cad640717708f93b8c6a0
vcpkg-cmake-config f7f14b802b782cd918b1e9be7c6580ffb50136c5ac6f65c0a5b78c4e0466820d
vcpkg-cmake-wrapper.cmake c507eaa077072e9877607fd5f70381eebf19900661e2e1fd099d84c4df1b8c24
vcpkg-pkgconfig-get-modules 305bd6e7c5f056e620624e480170fd917f1959a43458eda008ea05c647f86171
vcpkg.json 2a27cd0721793c5e2a4e66a834f3cf25f76ffafc178e1d6d703fba9e78139bd4
vcpkg-pkgconfig-get-modules 535080c47934c9c171b4c747ee6a8698cd07d537b23401640ef849130d8f0c3b
vcpkg.json 71bb644d1501bdc16fbe5e88a45a27c478bee667cbace588c569de28778c89ff
vcpkg_check_features 943b217e0968d64cf2cb9c272608e6a0b497377e792034f819809a79e1502c2b
vcpkg_copy_pdbs d57e4f196c82dc562a9968c6155073094513c31e2de475694143d3aa47954b1c
vcpkg_copy_tools 3d45ff761bddbabe8923b52330168dc3abd295fa469d3f2e47cb14dce85332d5
vcpkg_fixup_pkgconfig 588d833ff057d3ca99c14616c7ecfb5948b5e2a9e4fc02517dceb8b803473457
vcpkg_from_git 8f27bff0d01c6d15a3e691758df52bfbb0b1b929da45c4ebba02ef76b54b1881
vcpkg_fixup_pkgconfig 1a15f6c6d8e2b244d83a7514a0412d339127d2217d1df60ad1388b546c85f777
vcpkg_from_git 96ed81968f76354c00096dd8cd4e63c6a235fa969334a11ab18d11c0c512ff58
vcpkg_from_github b743742296a114ea1b18ae99672e02f142c4eb2bef7f57d36c038bedbfb0502f
vcpkg_install_copyright ba6c169ab4e59fa05682e530cdeb883767de22c8391f023d4e6844a7ec5dd3d2
vcpkg_replace_string d43c8699ce27e25d47367c970d1c546f6bc36b6df8fb0be0c3986eb5830bd4f1
zlib 2985ad8fb9f4ed298ef24f8add79f28cb17d4695c50e6bc7d3c786fed960a9cf
vcpkg_replace_string b450deb79207478b37119743e00808ebc42de0628e7b98c14ab24728bd5c78b8
zlib 33478f2ce98419fe311bf0f5c7b232a77f47e3be443479f1ecb95e79acdf75ba

View File

@@ -1,24 +1,24 @@
"""Functions for reading and writing GeoPandas dataframes."""
import os
import warnings
import numpy as np
from pyogrio._compat import HAS_GEOPANDAS, PANDAS_GE_15, PANDAS_GE_20
from pyogrio._compat import HAS_GEOPANDAS, PANDAS_GE_15, PANDAS_GE_20, PANDAS_GE_22
from pyogrio.errors import DataSourceError
from pyogrio.raw import (
DRIVERS_NO_MIXED_SINGLE_MULTI,
DRIVERS_NO_MIXED_DIMENSIONS,
detect_write_driver,
DRIVERS_NO_MIXED_SINGLE_MULTI,
_get_write_path_driver,
read,
read_arrow,
write,
)
from pyogrio.errors import DataSourceError
import warnings
def _stringify_path(path):
"""
Convert path-like to a string if possible, pass-through other objects
"""
"""Convert path-like to a string if possible, pass-through other objects."""
if isinstance(path, str):
return path
@@ -33,10 +33,12 @@ def _stringify_path(path):
def _try_parse_datetime(ser):
import pandas as pd # only called when pandas is known to be installed
if PANDAS_GE_20:
datetime_kwargs = dict(format="ISO8601", errors="ignore")
if PANDAS_GE_22:
datetime_kwargs = {"format": "ISO8601"}
elif PANDAS_GE_20:
datetime_kwargs = {"format": "ISO8601", "errors": "ignore"}
else:
datetime_kwargs = dict(yearfirst=True)
datetime_kwargs = {"yearfirst": True}
with warnings.catch_warnings():
warnings.filterwarnings(
"ignore",
@@ -48,10 +50,13 @@ def _try_parse_datetime(ser):
try:
res = pd.to_datetime(ser, **datetime_kwargs)
except Exception:
pass
res = ser
# if object dtype, try parse as utc instead
if res.dtype == "object":
res = pd.to_datetime(ser, utc=True, **datetime_kwargs)
try:
res = pd.to_datetime(ser, utc=True, **datetime_kwargs)
except Exception:
pass
if res.dtype != "object":
# GDAL only supports ms precision, convert outputs to match.
@@ -82,10 +87,12 @@ def read_dataframe(
sql_dialect=None,
fid_as_index=False,
use_arrow=None,
on_invalid="raise",
arrow_to_pandas_kwargs=None,
**kwargs,
):
"""Read from an OGR data source to a GeoPandas GeoDataFrame or Pandas DataFrame.
If the data source does not have a geometry column or ``read_geometry`` is False,
a DataFrame will be returned.
@@ -94,20 +101,23 @@ def read_dataframe(
Parameters
----------
path_or_buffer : pathlib.Path or str, or bytes buffer
A dataset path or URI, or raw buffer.
A dataset path or URI, raw buffer, or file-like object with a read method.
layer : int or str, optional (default: first layer)
If an integer is provided, it corresponds to the index of the layer
with the data source. If a string is provided, it must match the name
of the layer in the data source. Defaults to first layer in data source.
encoding : str, optional (default: None)
If present, will be used as the encoding for reading string values from
the data source, unless encoding can be inferred directly from the data
source.
the data source. By default will automatically try to detect the native
encoding and decode to ``UTF-8``.
columns : list-like, optional (default: all columns)
List of column names to import from the data source. Column names must
exactly match the names in the data source, and will be returned in
the order they occur in the data source. To avoid reading any columns,
pass an empty list-like.
pass an empty list-like. If combined with ``where`` parameter, must
include columns referenced in the ``where`` expression or the data may
not be correctly read; the data source may return empty results or
raise an exception (behavior varies by driver).
read_geometry : bool, optional (default: True)
If True, will read geometry into a GeoSeries. If False, a Pandas DataFrame
will be returned instead.
@@ -152,7 +162,12 @@ def read_dataframe(
the starting index is driver and file specific (e.g. typically 0 for
Shapefile and 1 for GeoPackage, but can still depend on the specific
file). The performance of reading a large number of features usings FIDs
is also driver specific.
is also driver specific and depends on the value of ``use_arrow``. The order
of the rows returned is undefined. If you would like to sort based on FID, use
``fid_as_index=True`` to have the index of the GeoDataFrame returned set to the
FIDs of the features read. If ``use_arrow=True``, the number of FIDs is limited
to 4997 for drivers with 'OGRSQL' as default SQL dialect. To read a larger
number of FIDs, set ``user_arrow=False``.
sql : str, optional (default: None)
The SQL statement to execute. Look at the sql_dialect parameter for more
information on the syntax to use for the query. When combined with other
@@ -184,6 +199,17 @@ def read_dataframe(
installed). When enabled, this provides a further speed-up.
Defaults to False, but this default can also be globally overridden
by setting the ``PYOGRIO_USE_ARROW=1`` environment variable.
on_invalid : str, optional (default: "raise")
The action to take when an invalid geometry is encountered. Possible
values:
- **raise**: an exception will be raised if a WKB input geometry is
invalid.
- **warn**: invalid WKB geometries will be returned as ``None`` and a
warning will be raised.
- **ignore**: invalid WKB geometries will be returned as ``None``
without a warning.
arrow_to_pandas_kwargs : dict, optional (default: None)
When `use_arrow` is True, these kwargs will be passed to the `to_pandas`_
call for the arrow to pandas conversion.
@@ -215,13 +241,13 @@ def read_dataframe(
https://arrow.apache.org/docs/python/generated/pyarrow.Table.html#pyarrow.Table.to_pandas
""" # noqa: E501
"""
if not HAS_GEOPANDAS:
raise ImportError("geopandas is required to use pyogrio.read_dataframe()")
import pandas as pd
import geopandas as gp
from geopandas.array import from_wkb
import pandas as pd
import shapely # if geopandas is present, shapely is expected to be present
path_or_buffer = _stringify_path(path_or_buffer)
@@ -279,10 +305,10 @@ def read_dataframe(
if PANDAS_GE_15 and wkb_values.dtype != object:
# for example ArrowDtype will otherwise create numpy array with pd.NA
wkb_values = wkb_values.to_numpy(na_value=None)
df["geometry"] = from_wkb(wkb_values, crs=meta["crs"])
df["geometry"] = shapely.from_wkb(wkb_values, on_invalid=on_invalid)
if force_2d:
df["geometry"] = shapely.force_2d(df["geometry"])
return gp.GeoDataFrame(df, geometry="geometry")
return gp.GeoDataFrame(df, geometry="geometry", crs=meta["crs"])
else:
return df
@@ -302,9 +328,9 @@ def read_dataframe(
if geometry is None or not read_geometry:
return df
geometry = from_wkb(geometry, crs=meta["crs"])
geometry = shapely.from_wkb(geometry, on_invalid=on_invalid)
return gp.GeoDataFrame(df, geometry=geometry)
return gp.GeoDataFrame(df, geometry=geometry, crs=meta["crs"])
# TODO: handle index properly
@@ -318,6 +344,7 @@ def write_dataframe(
promote_to_multi=None,
nan_as_null=True,
append=False,
use_arrow=None,
dataset_metadata=None,
layer_metadata=None,
metadata=None,
@@ -325,8 +352,7 @@ def write_dataframe(
layer_options=None,
**kwargs,
):
"""
Write GeoPandas GeoDataFrame to an OGR file format.
"""Write GeoPandas GeoDataFrame to an OGR file format.
Parameters
----------
@@ -335,16 +361,21 @@ def write_dataframe(
all values will be converted to strings to be written to the
output file, except None and np.nan, which will be set to NULL
in the output file.
path : str
path to file
layer :str, optional (default: None)
layer name
path : str or io.BytesIO
path to output file on writeable file system or an io.BytesIO object to
allow writing to memory. Will raise NotImplementedError if an open file
handle is passed; use BytesIO instead.
NOTE: support for writing to memory is limited to specific drivers.
layer : str, optional (default: None)
layer name to create. If writing to memory and layer name is not
provided, it layer name will be set to a UUID4 value.
driver : string, optional (default: None)
The OGR format driver used to write the vector file. By default write_dataframe
attempts to infer driver from path.
The OGR format driver used to write the vector file. By default attempts
to infer driver from path. Must be provided to write to memory.
encoding : str, optional (default: None)
If present, will be used as the encoding for writing string values to
the file.
the file. Use with caution, only certain drivers support encodings
other than UTF-8.
geometry_type : string, optional (default: None)
By default, the geometry type of the layer will be inferred from the
data, after applying the promote_to_multi logic. If the data only contains a
@@ -376,8 +407,17 @@ def write_dataframe(
append : bool, optional (default: False)
If True, the data source specified by path already exists, and the
driver supports appending to an existing data source, will cause the
data to be appended to the existing records in the data source.
data to be appended to the existing records in the data source. Not
supported for writing to in-memory files.
NOTE: append support is limited to specific drivers and GDAL versions.
use_arrow : bool, optional (default: False)
Whether to use Arrow as the transfer mechanism of the data to write
from Python to GDAL (requires GDAL >= 3.8 and `pyarrow` to be
installed). When enabled, this provides a further speed-up.
Defaults to False, but this default can also be globally overridden
by setting the ``PYOGRIO_USE_ARROW=1`` environment variable.
Using Arrow does not support writing an object-dtype column with
mixed types.
dataset_metadata : dict, optional (default: None)
Metadata to be stored at the dataset level in the output file; limited
to drivers that support writing metadata, such as GPKG, and silently
@@ -389,10 +429,10 @@ def write_dataframe(
metadata : dict, optional (default: None)
alias of layer_metadata
dataset_options : dict, optional
Dataset creation option (format specific) passed to OGR. Specify as
Dataset creation options (format specific) passed to OGR. Specify as
a key-value dictionary.
layer_options : dict, optional
Layer creation option (format specific) passed to OGR. Specify as
Layer creation options (format specific) passed to OGR. Specify as
a key-value dictionary.
**kwargs
Additional driver-specific dataset or layer creation options passed
@@ -402,23 +442,22 @@ def write_dataframe(
explicit `dataset_options` or `layer_options` keywords to manually
do this (for example if an option exists as both dataset and layer
option).
"""
# TODO: add examples to the docstring (e.g. OGR kwargs)
if not HAS_GEOPANDAS:
raise ImportError("geopandas is required to use pyogrio.write_dataframe()")
from geopandas.array import to_wkb
import pandas as pd
from pyproj.enums import WktVersion # if geopandas is available so is pyproj
path = str(path)
from geopandas.array import to_wkb
if not isinstance(df, pd.DataFrame):
raise ValueError("'df' must be a DataFrame or GeoDataFrame")
if driver is None:
driver = detect_write_driver(path)
if use_arrow is None:
use_arrow = bool(int(os.environ.get("PYOGRIO_USE_ARROW", "0")))
path, driver = _get_write_path_driver(path, driver, append=append)
geometry_columns = df.columns[df.dtypes == "geometry"]
if len(geometry_columns) > 1:
@@ -456,11 +495,11 @@ def write_dataframe(
# https://gdal.org/development/rfc/rfc56_millisecond_precision.html#core-changes
# Convert each row offset to a signed multiple of 15m and add to GMT value
gdal_offset_representation = tz_offset // pd.Timedelta("15m") + 100
gdal_tz_offsets[name] = gdal_offset_representation
gdal_tz_offsets[name] = gdal_offset_representation.values
else:
values = col.values
if isinstance(values, pd.api.extensions.ExtensionArray):
from pandas.arrays import IntegerArray, FloatingArray, BooleanArray
from pandas.arrays import BooleanArray, FloatingArray, IntegerArray
if isinstance(values, (IntegerArray, FloatingArray, BooleanArray)):
field_data.append(values._data)
@@ -473,6 +512,9 @@ def write_dataframe(
field_mask.append(None)
# Determine geometry_type and/or promote_to_multi
if geometry_column is not None:
geometry_types_all = geometry.geom_type
if geometry_column is not None and (
geometry_type is None or promote_to_multi is None
):
@@ -482,9 +524,10 @@ def write_dataframe(
# If there is data, infer layer geometry type + promote_to_multi
if not df.empty:
# None/Empty geometries sometimes report as Z incorrectly, so ignore them
has_z_arr = geometry[
(geometry != np.array(None)) & (~geometry.is_empty)
].has_z
with warnings.catch_warnings():
warnings.filterwarnings("ignore", r"GeoSeries\.notna", UserWarning)
geometry_notna = geometry.notna()
has_z_arr = geometry[geometry_notna & (~geometry.is_empty)].has_z
has_z = has_z_arr.any()
all_z = has_z_arr.all()
@@ -493,7 +536,7 @@ def write_dataframe(
f"Mixed 2D and 3D coordinates are not supported by {driver}"
)
geometry_types = pd.Series(geometry.type.unique()).dropna().values
geometry_types = pd.Series(geometry_types_all.unique()).dropna().values
if len(geometry_types) == 1:
tmp_geometry_type = geometry_types[0]
if promote_to_multi and tmp_geometry_type in (
@@ -539,7 +582,78 @@ def write_dataframe(
if epsg:
crs = f"EPSG:{epsg}"
else:
crs = geometry.crs.to_wkt(WktVersion.WKT1_GDAL)
crs = geometry.crs.to_wkt("WKT1_GDAL")
if use_arrow:
import pyarrow as pa
from pyogrio.raw import write_arrow
if geometry_column is not None:
# Convert to multi type
if promote_to_multi:
import shapely
mask_points = geometry_types_all == "Point"
mask_linestrings = geometry_types_all == "LineString"
mask_polygons = geometry_types_all == "Polygon"
if mask_points.any():
geometry[mask_points] = shapely.multipoints(
np.atleast_2d(geometry[mask_points]), axis=0
)
if mask_linestrings.any():
geometry[mask_linestrings] = shapely.multilinestrings(
np.atleast_2d(geometry[mask_linestrings]), axis=0
)
if mask_polygons.any():
geometry[mask_polygons] = shapely.multipolygons(
np.atleast_2d(geometry[mask_polygons]), axis=0
)
geometry = to_wkb(geometry.values)
df = df.copy(deep=False)
# convert to plain DataFrame to avoid warning from geopandas about
# writing non-geometries to the geometry column
df = pd.DataFrame(df, copy=False)
df[geometry_column] = geometry
table = pa.Table.from_pandas(df, preserve_index=False)
if geometry_column is not None:
# ensure that the geometry column is binary (for all-null geometries,
# this could be a wrong type)
geom_field = table.schema.field(geometry_column)
if not (
pa.types.is_binary(geom_field.type)
or pa.types.is_large_binary(geom_field.type)
):
table = table.set_column(
table.schema.get_field_index(geometry_column),
geom_field.with_type(pa.binary()),
table[geometry_column].cast(pa.binary()),
)
write_arrow(
table,
path,
layer=layer,
driver=driver,
geometry_name=geometry_column,
geometry_type=geometry_type,
crs=crs,
encoding=encoding,
append=append,
dataset_metadata=dataset_metadata,
layer_metadata=layer_metadata,
metadata=metadata,
dataset_options=dataset_options,
layer_options=layer_options,
**kwargs,
)
return
# If there is geometry data, prepare it to be written
if geometry_column is not None:

View File

@@ -42,7 +42,7 @@
<CARB> +proj=helmert +drx=0.000049 +dry=-0.001088 +drz=0.000664 +convention=position_vector
<EURA> +proj=helmert +drx=-0.000083 +dry=0.000534 +drz=0.000750 +convention=position_vector
<EURA> +proj=helmert +drx=-0.000083 +dry=-0.000534 +drz=0.000750 +convention=position_vector
<INDI> +proj=helmert +drx=0.001232 +dry=0.000303 +drz=0.001540 +convention=position_vector
@@ -75,7 +75,7 @@
<CARB_T> +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=0.000049 +dry=-0.001088 +drz=0.000664 +convention=position_vector
<EURA_T> +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=-0.000083 +dry=0.000534 +drz=0.000750 +convention=position_vector
<EURA_T> +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=-0.000083 +dry=-0.000534 +drz=0.000750 +convention=position_vector
<INDI_T> +proj=helmert +dx=0.00041 +dy=0.00022 +dz=0.00041 +drx=0.001232 +dry=0.000303 +drz=0.001540 +convention=position_vector

View File

@@ -1,9 +1,9 @@
# Version checking for PROJ
set (PACKAGE_VERSION "9.3.0")
set (PACKAGE_VERSION "9.4.1")
set (PACKAGE_VERSION_MAJOR "9")
set (PACKAGE_VERSION_MINOR "3")
set (PACKAGE_VERSION_PATCH "0")
set (PACKAGE_VERSION_MINOR "4")
set (PACKAGE_VERSION_PATCH "1")
# These variable definitions parallel those in PROJ's
# cmake/CMakeLists.txt.

View File

@@ -27,7 +27,8 @@ if("TRUE")
endif()
cmake_policy(POP)
find_dependency(unofficial-sqlite3 CONFIG)
find_dependency(SQLite3)
if(DEFINED PROJ_CONFIG_FIND_TIFF_DEP)
find_dependency(TIFF)
endif()

View File

@@ -3,11 +3,11 @@
if("${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}" LESS 2.8)
message(FATAL_ERROR "CMake >= 2.8.0 required")
endif()
if(CMAKE_VERSION VERSION_LESS "2.8.3")
message(FATAL_ERROR "CMake >= 2.8.3 required")
if(CMAKE_VERSION VERSION_LESS "2.8.12")
message(FATAL_ERROR "CMake >= 2.8.12 required")
endif()
cmake_policy(PUSH)
cmake_policy(VERSION 2.8.3...3.25)
cmake_policy(VERSION 2.8.12...3.28)
#----------------------------------------------------------------
# Generated CMake target import file.
#----------------------------------------------------------------
@@ -60,13 +60,9 @@ add_library(PROJ::proj STATIC IMPORTED)
set_target_properties(PROJ::proj PROPERTIES
INTERFACE_COMPILE_DEFINITIONS "PROJ_DLL="
INTERFACE_INCLUDE_DIRECTORIES "${_IMPORT_PREFIX}/include"
INTERFACE_LINK_LIBRARIES "\$<LINK_ONLY:-lm>;\$<LINK_ONLY:-ldl>;\$<LINK_ONLY:-pthread>;\$<LINK_ONLY:unofficial::sqlite3::sqlite3>;\$<LINK_ONLY:>;\$<LINK_ONLY:TIFF::TIFF>;\$<LINK_ONLY:CURL::libcurl>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:ws2_32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:wldap32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:advapi32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:crypt32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:normaliz>>"
INTERFACE_LINK_LIBRARIES "\$<LINK_ONLY:-lm>;\$<LINK_ONLY:-ldl>;\$<LINK_ONLY:-pthread>;\$<LINK_ONLY:SQLite::SQLite3>;\$<LINK_ONLY:>;\$<LINK_ONLY:TIFF::TIFF>;\$<LINK_ONLY:CURL::libcurl>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:ws2_32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:wldap32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:advapi32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:crypt32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:normaliz>>"
)
if(CMAKE_VERSION VERSION_LESS 2.8.12)
message(FATAL_ERROR "This file relies on consumers using CMake 2.8.12 or greater.")
endif()
# Load information for each installed configuration.
file(GLOB _cmake_config_files "${CMAKE_CURRENT_LIST_DIR}/proj-targets-*.cmake")
foreach(_cmake_config_file IN LISTS _cmake_config_files)
@@ -80,9 +76,12 @@ set(_IMPORT_PREFIX)
# Loop over all imported files and verify that they actually exist
foreach(_cmake_target IN LISTS _cmake_import_check_targets)
foreach(_cmake_file IN LISTS "_cmake_import_check_files_for_${_cmake_target}")
if(NOT EXISTS "${_cmake_file}")
message(FATAL_ERROR "The imported target \"${_cmake_target}\" references the file
if(CMAKE_VERSION VERSION_LESS "3.28"
OR NOT DEFINED _cmake_import_check_xcframework_for_${_cmake_target}
OR NOT IS_DIRECTORY "${_cmake_import_check_xcframework_for_${_cmake_target}}")
foreach(_cmake_file IN LISTS "_cmake_import_check_files_for_${_cmake_target}")
if(NOT EXISTS "${_cmake_file}")
message(FATAL_ERROR "The imported target \"${_cmake_target}\" references the file
\"${_cmake_file}\"
but this file does not exist. Possible reasons include:
* The file was deleted, renamed, or moved to another location.
@@ -91,8 +90,9 @@ but this file does not exist. Possible reasons include:
\"${CMAKE_CURRENT_LIST_FILE}\"
but not all the files it references.
")
endif()
endforeach()
endif()
endforeach()
endif()
unset(_cmake_file)
unset("_cmake_import_check_files_for_${_cmake_target}")
endforeach()

View File

@@ -3,11 +3,11 @@
if("${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}" LESS 2.8)
message(FATAL_ERROR "CMake >= 2.8.0 required")
endif()
if(CMAKE_VERSION VERSION_LESS "2.8.3")
message(FATAL_ERROR "CMake >= 2.8.3 required")
if(CMAKE_VERSION VERSION_LESS "2.8.12")
message(FATAL_ERROR "CMake >= 2.8.12 required")
endif()
cmake_policy(PUSH)
cmake_policy(VERSION 2.8.3...3.25)
cmake_policy(VERSION 2.8.12...3.28)
#----------------------------------------------------------------
# Generated CMake target import file.
#----------------------------------------------------------------
@@ -60,13 +60,9 @@ add_library(PROJ4::proj STATIC IMPORTED)
set_target_properties(PROJ4::proj PROPERTIES
INTERFACE_COMPILE_DEFINITIONS "PROJ_DLL="
INTERFACE_INCLUDE_DIRECTORIES "${_IMPORT_PREFIX}/include"
INTERFACE_LINK_LIBRARIES "\$<LINK_ONLY:-lm>;\$<LINK_ONLY:-ldl>;\$<LINK_ONLY:-pthread>;\$<LINK_ONLY:unofficial::sqlite3::sqlite3>;\$<LINK_ONLY:>;\$<LINK_ONLY:TIFF::TIFF>;\$<LINK_ONLY:CURL::libcurl>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:ws2_32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:wldap32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:advapi32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:crypt32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:normaliz>>"
INTERFACE_LINK_LIBRARIES "\$<LINK_ONLY:-lm>;\$<LINK_ONLY:-ldl>;\$<LINK_ONLY:-pthread>;\$<LINK_ONLY:SQLite::SQLite3>;\$<LINK_ONLY:>;\$<LINK_ONLY:TIFF::TIFF>;\$<LINK_ONLY:CURL::libcurl>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:ws2_32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:wldap32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:advapi32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:crypt32>>;\$<LINK_ONLY:\$<\$<CXX_COMPILER_ID:MSVC>:normaliz>>"
)
if(CMAKE_VERSION VERSION_LESS 2.8.12)
message(FATAL_ERROR "This file relies on consumers using CMake 2.8.12 or greater.")
endif()
# Load information for each installed configuration.
file(GLOB _cmake_config_files "${CMAKE_CURRENT_LIST_DIR}/proj4-targets-*.cmake")
foreach(_cmake_config_file IN LISTS _cmake_config_files)
@@ -80,9 +76,12 @@ set(_IMPORT_PREFIX)
# Loop over all imported files and verify that they actually exist
foreach(_cmake_target IN LISTS _cmake_import_check_targets)
foreach(_cmake_file IN LISTS "_cmake_import_check_files_for_${_cmake_target}")
if(NOT EXISTS "${_cmake_file}")
message(FATAL_ERROR "The imported target \"${_cmake_target}\" references the file
if(CMAKE_VERSION VERSION_LESS "3.28"
OR NOT DEFINED _cmake_import_check_xcframework_for_${_cmake_target}
OR NOT IS_DIRECTORY "${_cmake_import_check_xcframework_for_${_cmake_target}}")
foreach(_cmake_file IN LISTS "_cmake_import_check_files_for_${_cmake_target}")
if(NOT EXISTS "${_cmake_file}")
message(FATAL_ERROR "The imported target \"${_cmake_target}\" references the file
\"${_cmake_file}\"
but this file does not exist. Possible reasons include:
* The file was deleted, renamed, or moved to another location.
@@ -91,8 +90,9 @@ but this file does not exist. Possible reasons include:
\"${CMAKE_CURRENT_LIST_FILE}\"
but not all the files it references.
")
endif()
endforeach()
endif()
endforeach()
endif()
unset(_cmake_file)
unset("_cmake_import_check_files_for_${_cmake_target}")
endforeach()

View File

@@ -2,7 +2,7 @@
"$id": "https://proj.org/schemas/v0.7/projjson.schema.json",
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Schema for PROJJSON (v0.7)",
"$comment": "This file exists both in data/ and in schemas/vXXX/. Keep both in sync. And if changing the value of $id, change PROJJSON_DEFAULT_VERSION accordingly in io.cpp",
"$comment": "This document is copyright Even Rouault and PROJ contributors, 2019-2023, and subject to the MIT license. This file exists both in data/ and in schemas/vXXX/. Keep both in sync. And if changing the value of $id, change PROJJSON_DEFAULT_VERSION accordingly in io.cpp",
"oneOf": [
{ "$ref": "#/definitions/crs" },

View File

@@ -3,13 +3,13 @@
"spdxVersion": "SPDX-2.2",
"dataLicense": "CC0-1.0",
"SPDXID": "SPDXRef-DOCUMENT",
"documentNamespace": "https://spdx.org/spdxdocs/proj-x64-linux-dynamic-9.3.0#1-f98b5c42-8f64-46e4-b067-2b40b1ff644d",
"name": "proj:x64-linux-dynamic@9.3.0#1 790c2ab7942a33c34846f162796b4ea27574760fbde47656105c33d7f285ebbd",
"documentNamespace": "https://spdx.org/spdxdocs/proj-x64-linux-dynamic-release-9.4.1-fe5308a0-0727-46af-bf1a-452582b07786",
"name": "proj:x64-linux-dynamic-release@9.4.1 7a1ad719fa19418b0ccf3e6bb837ab34ed4fcc99911f810a598dfc0a2960b42c",
"creationInfo": {
"creators": [
"Tool: vcpkg-ac02a9f660977426b8ec6392919fbb1d51b10998"
"Tool: vcpkg-2024-06-10-02590c430e4ed9215d27870138c2e579cc338772"
],
"created": "2023-10-25T18:56:46Z"
"created": "2024-09-28T15:27:15Z"
},
"relationships": [
{
@@ -47,11 +47,6 @@
"relationshipType": "CONTAINS",
"relatedSpdxElement": "SPDXRef-file-5"
},
{
"spdxElementId": "SPDXRef-port",
"relationshipType": "CONTAINS",
"relatedSpdxElement": "SPDXRef-file-6"
},
{
"spdxElementId": "SPDXRef-binary",
"relationshipType": "GENERATED_FROM",
@@ -86,19 +81,14 @@
"spdxElementId": "SPDXRef-file-5",
"relationshipType": "CONTAINED_BY",
"relatedSpdxElement": "SPDXRef-port"
},
{
"spdxElementId": "SPDXRef-file-6",
"relationshipType": "CONTAINED_BY",
"relatedSpdxElement": "SPDXRef-port"
}
],
"packages": [
{
"name": "proj",
"SPDXID": "SPDXRef-port",
"versionInfo": "9.3.0#1",
"downloadLocation": "git+https://github.com/Microsoft/vcpkg@6e31164b906c96903b8352e6a9211ae019672ac4",
"versionInfo": "9.4.1",
"downloadLocation": "git+https://github.com/Microsoft/vcpkg@dafa38417689eb52c17a425ace8e1f3ecfb74045",
"homepage": "https://proj.org/",
"licenseConcluded": "MIT",
"licenseDeclared": "NOASSERTION",
@@ -107,9 +97,9 @@
"comment": "This is the port (recipe) consumed by vcpkg."
},
{
"name": "proj:x64-linux-dynamic",
"name": "proj:x64-linux-dynamic-release",
"SPDXID": "SPDXRef-binary",
"versionInfo": "790c2ab7942a33c34846f162796b4ea27574760fbde47656105c33d7f285ebbd",
"versionInfo": "7a1ad719fa19418b0ccf3e6bb837ab34ed4fcc99911f810a598dfc0a2960b42c",
"downloadLocation": "NONE",
"licenseConcluded": "MIT",
"licenseDeclared": "NOASSERTION",
@@ -119,70 +109,34 @@
{
"SPDXID": "SPDXRef-resource-1",
"name": "OSGeo/PROJ",
"downloadLocation": "git+https://github.com/OSGeo/PROJ@9.3.0",
"downloadLocation": "git+https://github.com/OSGeo/PROJ@9.4.1",
"licenseConcluded": "NOASSERTION",
"licenseDeclared": "NOASSERTION",
"copyrightText": "NOASSERTION",
"checksums": [
{
"algorithm": "SHA512",
"checksumValue": "ee8170780c70e09efa4bc3fcf6ee9a2c15554a05a8562617fc5e9698fb33c6c0af380dd0de836db91955eb35623ded1fec67c6afe5fd3b692fcf4f4b3e4f0658"
"checksumValue": "4b3ceb9e3b2213b0bb2fc839f4dd70e08ee53323465c7bb473131907e4b66c836623da115c7413dfd8bafd0a992fa173003063e2233ab577139ab8462655b6cc"
}
]
}
],
"files": [
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/proj/6e31164b906c96903b8352e6a9211ae019672ac4/vcpkg.json",
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/proj/dafa38417689eb52c17a425ace8e1f3ecfb74045/fix-proj4-targets-cmake.patch",
"SPDXID": "SPDXRef-file-0",
"checksums": [
{
"algorithm": "SHA256",
"checksumValue": "34915344411521c3bf421a755e44461aecfbb6567f3e0f9b619ea0ea3eb0ab89"
"checksumValue": "d76e1d419d3367dda3381fd11a637f3465bc838d611fa8ceaca20048c1b3cd6e"
}
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/proj/6e31164b906c96903b8352e6a9211ae019672ac4/fix-uwp.patch",
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/proj/dafa38417689eb52c17a425ace8e1f3ecfb74045/fix-win-output-name.patch",
"SPDXID": "SPDXRef-file-1",
"checksums": [
{
"algorithm": "SHA256",
"checksumValue": "8ca726f9cca8465fb5efdcff7f2dc0c9247fa0782f0bd1d1384d7912afb7c3b8"
}
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/proj/6e31164b906c96903b8352e6a9211ae019672ac4/usage",
"SPDXID": "SPDXRef-file-2",
"checksums": [
{
"algorithm": "SHA256",
"checksumValue": "26169363c27e71a44cba9d703b22bbd13c191ab5e2d0612b3dd35c735c971fe6"
}
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/proj/6e31164b906c96903b8352e6a9211ae019672ac4/portfile.cmake",
"SPDXID": "SPDXRef-file-3",
"checksums": [
{
"algorithm": "SHA256",
"checksumValue": "2d5a16a458eb429dfffb41c4e40c1815bfe58d2cdecff06ab5417fddd0cc82a5"
}
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/proj/6e31164b906c96903b8352e6a9211ae019672ac4/fix-win-output-name.patch",
"SPDXID": "SPDXRef-file-4",
"checksums": [
{
"algorithm": "SHA256",
@@ -193,20 +147,8 @@
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/proj/6e31164b906c96903b8352e6a9211ae019672ac4/fix-proj4-targets-cmake.patch",
"SPDXID": "SPDXRef-file-5",
"checksums": [
{
"algorithm": "SHA256",
"checksumValue": "0bcc51d69830a495a955cb97a8a1f91d9cec0410cbd8198f82f4fe60169d696f"
}
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/proj/6e31164b906c96903b8352e6a9211ae019672ac4/remove_toolset_restriction.patch",
"SPDXID": "SPDXRef-file-6",
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/proj/dafa38417689eb52c17a425ace8e1f3ecfb74045/remove_toolset_restriction.patch",
"SPDXID": "SPDXRef-file-2",
"checksums": [
{
"algorithm": "SHA256",
@@ -215,6 +157,42 @@
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/proj/dafa38417689eb52c17a425ace8e1f3ecfb74045/usage",
"SPDXID": "SPDXRef-file-3",
"checksums": [
{
"algorithm": "SHA256",
"checksumValue": "26169363c27e71a44cba9d703b22bbd13c191ab5e2d0612b3dd35c735c971fe6"
}
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/proj/dafa38417689eb52c17a425ace8e1f3ecfb74045/portfile.cmake",
"SPDXID": "SPDXRef-file-4",
"checksums": [
{
"algorithm": "SHA256",
"checksumValue": "c1e2909ff16c46a449fbb7e71e14ea3d33103491f21ce5191d899cafded1822f"
}
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
},
{
"fileName": ".//opt/vcpkg/buildtrees/versioning_/versions/proj/dafa38417689eb52c17a425ace8e1f3ecfb74045/vcpkg.json",
"SPDXID": "SPDXRef-file-5",
"checksums": [
{
"algorithm": "SHA256",
"checksumValue": "0dbf128c799775bc808b756ed9800d1fd349f7d41b874ad78a204d17988b8737"
}
],
"licenseConcluded": "NOASSERTION",
"copyrightText": "NOASSERTION"
}
]
}

View File

@@ -1,28 +1,27 @@
cmake 3.27.5
curl 5a630f18174e3f39ad6a88df7c3b93b2270527ea4b19fc579305505d17b83995
cmake 3.30.2
curl 4e47bc89fe251262d304424d5c2aeb86c9a8f6c51a1d681c442b1a0db952b8a4
features core;net;tiff
fix-proj4-targets-cmake.patch 0bcc51d69830a495a955cb97a8a1f91d9cec0410cbd8198f82f4fe60169d696f
fix-uwp.patch 8ca726f9cca8465fb5efdcff7f2dc0c9247fa0782f0bd1d1384d7912afb7c3b8
fix-proj4-targets-cmake.patch d76e1d419d3367dda3381fd11a637f3465bc838d611fa8ceaca20048c1b3cd6e
fix-win-output-name.patch 706e536cfe9a90c1b071ab5b00932fa96bb35c67fcb0f96c0fc4feb7c0b44011
nlohmann-json 3a067cac280f4f73d0a02f1879011a572d128fc1a8c8b8e4caccb39e75ab9364
portfile.cmake 2d5a16a458eb429dfffb41c4e40c1815bfe58d2cdecff06ab5417fddd0cc82a5
ports.cmake 5a8e00cedff0c898b1f90f7d129329d0288801bc9056562b039698caf31ff3f3
nlohmann-json 44b358e958d4c5ea3b7311a0e07c50028e3cb9c79fa694374d7413da7976366d
portfile.cmake c1e2909ff16c46a449fbb7e71e14ea3d33103491f21ce5191d899cafded1822f
ports.cmake 3581688c16865b2026d28240d7c6257b848475ffc9fe81b3f3d304973253cf5e
post_build_checks 2
remove_toolset_restriction.patch 25c1c986673bd539f5ec920684a08b38d0d37d9e24b6793e5b79dbd717bde04e
sqlite3 ab2561acf368ba20b7c7b8bcc1cf3178819f8bc7e3f5d009d60bf327091bf44c
sqlite3 f86216823174c16c16ed1d33c1038759b2027189388633132d575e70d97656c8
tiff c271063333073276f88917c961b3c1e15d42746004d329004ffcb4a0dc3e5dae
triplet x64-linux-dynamic
triplet_abi 96ce2ff9564a8b4b9a180f0ffb82b6584a706982ae668e328ffc092ba334721f-b3b2cd43408edc7b2c5525859d9730edbfba2e4068956c6607a21d0abdaef072-ca0d0c5903d7f43fb3dd8f702ecaa82569b715ec
sqlite3 47c85d0bed92146a407dcc0edb61d0f4f8a20556dd62a6206bf5d42ced880e30
sqlite3 7f9a2e4c8f8fa823aa53f04d00eff5b32047d76990d69d93d359e7a5210f33c6
tiff 34d0c3db5c2929ab6f7b2a999fc538b38e2836eeb877278f2f017fee015eb5c3
triplet x64-linux-dynamic-release
triplet_abi 96ce2ff9564a8b4b9a180f0ffb82b6584a706982ae668e328ffc092ba334721f-8f20f723a25e9787363b95fcfd14b63d82b71ac3e295a73c97d09e681decb90c-7fb5c0e06a13ccc6d1603dc7d10780c1a92b2b78
usage 26169363c27e71a44cba9d703b22bbd13c191ab5e2d0612b3dd35c735c971fe6
vcpkg-cmake a433c5e70a35b07c67b910e7a8b1c55f3863c3927c2e874d56544673dfb5774f
vcpkg-cmake-config fe2f5adbb708be2b9a7caa1e73549d92dd41b68173ed2f187aad834b217b4491
vcpkg.json 34915344411521c3bf421a755e44461aecfbb6567f3e0f9b619ea0ea3eb0ab89
vcpkg-cmake c66dd076f3f67e579e976f89b74e524cdedfc182bb5cad640717708f93b8c6a0
vcpkg-cmake-config f7f14b802b782cd918b1e9be7c6580ffb50136c5ac6f65c0a5b78c4e0466820d
vcpkg.json 0dbf128c799775bc808b756ed9800d1fd349f7d41b874ad78a204d17988b8737
vcpkg_check_features 943b217e0968d64cf2cb9c272608e6a0b497377e792034f819809a79e1502c2b
vcpkg_copy_pdbs d57e4f196c82dc562a9968c6155073094513c31e2de475694143d3aa47954b1c
vcpkg_copy_tools 3d45ff761bddbabe8923b52330168dc3abd295fa469d3f2e47cb14dce85332d5
vcpkg_fixup_pkgconfig 588d833ff057d3ca99c14616c7ecfb5948b5e2a9e4fc02517dceb8b803473457
vcpkg_from_git 8f27bff0d01c6d15a3e691758df52bfbb0b1b929da45c4ebba02ef76b54b1881
vcpkg_fixup_pkgconfig 1a15f6c6d8e2b244d83a7514a0412d339127d2217d1df60ad1388b546c85f777
vcpkg_from_git 96ed81968f76354c00096dd8cd4e63c6a235fa969334a11ab18d11c0c512ff58
vcpkg_from_github b743742296a114ea1b18ae99672e02f142c4eb2bef7f57d36c038bedbfb0502f
vcpkg_list f5de3ebcbc40a4db90622ade9aca918e2cf404dc0d91342fcde457d730e6fa29
vcpkg_replace_string d43c8699ce27e25d47367c970d1c546f6bc36b6df8fb0be0c3986eb5830bd4f1
vcpkg_replace_string b450deb79207478b37119743e00808ebc42de0628e7b98c14ab24728bd5c78b8

View File

@@ -1,24 +1,28 @@
import warnings
"""Low level functions to read and write OGR data sources."""
import warnings
from io import BytesIO
from pathlib import Path
from pyogrio._compat import HAS_ARROW_API, HAS_ARROW_WRITE_API, HAS_PYARROW
from pyogrio._env import GDALEnv
from pyogrio._compat import HAS_ARROW_API
from pyogrio.core import detect_write_driver
from pyogrio.errors import DataSourceError
from pyogrio.util import (
get_vsi_path,
vsi_path,
_preprocess_options_key_value,
_mask_to_wkb,
_preprocess_options_key_value,
get_vsi_path_or_buffer,
vsi_path,
)
with GDALEnv():
from pyogrio._io import ogr_open_arrow, ogr_read, ogr_write
from pyogrio._io import ogr_open_arrow, ogr_read, ogr_write, ogr_write_arrow
from pyogrio._ogr import (
_get_driver_metadata_item,
get_gdal_version,
get_gdal_version_string,
ogr_driver_supports_vsi,
ogr_driver_supports_write,
remove_virtual_file,
_get_driver_metadata_item,
)
@@ -60,20 +64,23 @@ def read(
Parameters
----------
path_or_buffer : pathlib.Path or str, or bytes buffer
A dataset path or URI, or raw buffer.
A dataset path or URI, raw buffer, or file-like object with a read method.
layer : int or str, optional (default: first layer)
If an integer is provided, it corresponds to the index of the layer
with the data source. If a string is provided, it must match the name
of the layer in the data source. Defaults to first layer in data source.
encoding : str, optional (default: None)
If present, will be used as the encoding for reading string values from
the data source, unless encoding can be inferred directly from the data
source.
the data source. By default will automatically try to detect the native
encoding and decode to ``UTF-8``.
columns : list-like, optional (default: all columns)
List of column names to import from the data source. Column names must
exactly match the names in the data source, and will be returned in
the order they occur in the data source. To avoid reading any columns,
pass an empty list-like.
pass an empty list-like. If combined with ``where`` parameter, must
include columns referenced in the ``where`` expression or the data may
not be correctly read; the data source may return empty results or
raise an exception (behavior varies by driver).
read_geometry : bool, optional (default: True)
If True, will read geometry into WKB. If False, geometry will be None.
force_2d : bool, optional (default: False)
@@ -186,35 +193,27 @@ def read(
https://www.gaia-gis.it/gaia-sins/spatialite-sql-latest.html
"""
path, buffer = get_vsi_path(path_or_buffer)
dataset_kwargs = _preprocess_options_key_value(kwargs) if kwargs else {}
try:
result = ogr_read(
path,
layer=layer,
encoding=encoding,
columns=columns,
read_geometry=read_geometry,
force_2d=force_2d,
skip_features=skip_features,
max_features=max_features or 0,
where=where,
bbox=bbox,
mask=_mask_to_wkb(mask),
fids=fids,
sql=sql,
sql_dialect=sql_dialect,
return_fids=return_fids,
dataset_kwargs=dataset_kwargs,
datetime_as_string=datetime_as_string,
)
finally:
if buffer is not None:
remove_virtual_file(path)
return result
return ogr_read(
get_vsi_path_or_buffer(path_or_buffer),
layer=layer,
encoding=encoding,
columns=columns,
read_geometry=read_geometry,
force_2d=force_2d,
skip_features=skip_features,
max_features=max_features or 0,
where=where,
bbox=bbox,
mask=_mask_to_wkb(mask),
fids=fids,
sql=sql,
sql_dialect=sql_dialect,
return_fids=return_fids,
dataset_kwargs=dataset_kwargs,
datetime_as_string=datetime_as_string,
)
def read_arrow(
@@ -236,8 +235,7 @@ def read_arrow(
return_fids=False,
**kwargs,
):
"""
Read OGR data source into a pyarrow Table.
"""Read OGR data source into a pyarrow Table.
See docstring of `read` for parameters.
@@ -255,9 +253,18 @@ def read_arrow(
"geometry_type": "<geometry_type>",
"geometry_name": "<name of geometry column in arrow table>",
}
"""
if not HAS_PYARROW:
raise RuntimeError(
"pyarrow required to read using 'read_arrow'. You can use 'open_arrow' "
"to read data with an alternative Arrow implementation"
)
from pyarrow import Table
gdal_version = get_gdal_version()
if skip_features < 0:
raise ValueError("'skip_features' must be >= 0")
@@ -275,7 +282,7 @@ def read_arrow(
# handle skip_features internally within open_arrow if GDAL >= 3.8.0
gdal_skip_features = 0
if get_gdal_version() >= (3, 8, 0):
if gdal_version >= (3, 8, 0):
gdal_skip_features = skip_features
skip_features = 0
@@ -295,6 +302,7 @@ def read_arrow(
return_fids=return_fids,
skip_features=gdal_skip_features,
batch_size=batch_size,
use_pyarrow=True,
**kwargs,
) as source:
meta, reader = source
@@ -349,35 +357,68 @@ def open_arrow(
sql_dialect=None,
return_fids=False,
batch_size=65_536,
use_pyarrow=False,
**kwargs,
):
"""
Open OGR data source as a stream of pyarrow record batches.
"""Open OGR data source as a stream of Arrow record batches.
See docstring of `read` for parameters.
The RecordBatchStreamReader is reading from a stream provided by OGR and must not be
The returned object is reading from a stream provided by OGR and must not be
accessed after the OGR dataset has been closed, i.e. after the context manager has
been closed.
By default this functions returns a generic stream object implementing
the `Arrow PyCapsule Protocol`_ (i.e. having an ``__arrow_c_stream__``
method). This object can then be consumed by your Arrow implementation
of choice that supports this protocol.
Optionally, you can specify ``use_pyarrow=True`` to directly get the
stream as a `pyarrow.RecordBatchReader`.
.. _Arrow PyCapsule Protocol: https://arrow.apache.org/docs/format/CDataInterface/PyCapsuleInterface.html
Other Parameters
----------------
batch_size : int (default: 65_536)
Maximum number of features to retrieve in a batch.
use_pyarrow : bool (default: False)
If True, return a pyarrow RecordBatchReader instead of a generic
ArrowStream object. In the default case, this stream object needs
to be passed to another library supporting the Arrow PyCapsule
Protocol to consume the stream of data.
Examples
--------
>>> from pyogrio.raw import open_arrow
>>> import pyarrow as pa
>>> import shapely
>>>
>>> with open_arrow(path) as source:
>>> meta, stream = source
>>> # wrap the arrow stream object in a pyarrow RecordBatchReader
>>> reader = pa.RecordBatchReader.from_stream(stream)
>>> geom_col = meta["geometry_name"] or "wkb_geometry"
>>> for batch in reader:
>>> geometries = shapely.from_wkb(batch[geom_col])
The returned `stream` object needs to be consumed by a library implementing
the Arrow PyCapsule Protocol. In the above example, pyarrow is used through
its RecordBatchReader. For this case, you can also specify ``use_pyarrow=True``
to directly get this result as a short-cut:
>>> with open_arrow(path, use_pyarrow=True) as source:
>>> meta, reader = source
>>> for table in reader:
>>> geometries = shapely.from_wkb(table[meta["geometry_name"]])
>>> geom_col = meta["geometry_name"] or "wkb_geometry"
>>> for batch in reader:
>>> geometries = shapely.from_wkb(batch[geom_col])
Returns
-------
(dict, pyarrow.RecordBatchStreamReader)
(dict, pyarrow.RecordBatchReader or ArrowStream)
Returns a tuple of meta information about the data source in a dict,
and a pyarrow RecordBatchStreamReader with data.
and a data stream object (a generic ArrowStream object, or a pyarrow
RecordBatchReader if `use_pyarrow` is set to True).
Meta is: {
"crs": "<crs>",
@@ -386,41 +427,37 @@ def open_arrow(
"geometry_type": "<geometry_type>",
"geometry_name": "<name of geometry column in arrow table>",
}
"""
if not HAS_ARROW_API:
raise RuntimeError("pyarrow and GDAL>= 3.6 required to read using arrow")
path, buffer = get_vsi_path(path_or_buffer)
raise RuntimeError("GDAL>= 3.6 required to read using arrow")
dataset_kwargs = _preprocess_options_key_value(kwargs) if kwargs else {}
try:
return ogr_open_arrow(
path,
layer=layer,
encoding=encoding,
columns=columns,
read_geometry=read_geometry,
force_2d=force_2d,
skip_features=skip_features,
max_features=max_features or 0,
where=where,
bbox=bbox,
mask=_mask_to_wkb(mask),
fids=fids,
sql=sql,
sql_dialect=sql_dialect,
return_fids=return_fids,
dataset_kwargs=dataset_kwargs,
batch_size=batch_size,
)
finally:
if buffer is not None:
remove_virtual_file(path)
return ogr_open_arrow(
get_vsi_path_or_buffer(path_or_buffer),
layer=layer,
encoding=encoding,
columns=columns,
read_geometry=read_geometry,
force_2d=force_2d,
skip_features=skip_features,
max_features=max_features or 0,
where=where,
bbox=bbox,
mask=_mask_to_wkb(mask),
fids=fids,
sql=sql,
sql_dialect=sql_dialect,
return_fids=return_fids,
dataset_kwargs=dataset_kwargs,
batch_size=batch_size,
use_pyarrow=use_pyarrow,
)
def _parse_options_names(xml):
"""Convert metadata xml to list of names"""
"""Convert metadata xml to list of names."""
# Based on Fiona's meta.py
# (https://github.com/Toblerity/Fiona/blob/91c13ad8424641557a4e5f038f255f9b657b1bc5/fiona/meta.py)
import xml.etree.ElementTree as ET
@@ -436,6 +473,117 @@ def _parse_options_names(xml):
return options
def _validate_metadata(dataset_metadata, layer_metadata, metadata):
"""Validate the metadata."""
if metadata is not None:
if layer_metadata is not None:
raise ValueError("Cannot pass both metadata and layer_metadata")
layer_metadata = metadata
# validate metadata types
for meta in [dataset_metadata, layer_metadata]:
if meta is not None:
for k, v in meta.items():
if not isinstance(k, str):
raise ValueError(f"metadata key {k} must be a string")
if not isinstance(v, str):
raise ValueError(f"metadata value {v} must be a string")
return dataset_metadata, layer_metadata
def _preprocess_options_kwargs(driver, dataset_options, layer_options, kwargs):
"""Preprocess kwargs and split in dataset and layer creation options."""
dataset_kwargs = _preprocess_options_key_value(dataset_options or {})
layer_kwargs = _preprocess_options_key_value(layer_options or {})
if kwargs:
kwargs = _preprocess_options_key_value(kwargs)
dataset_option_names = _parse_options_names(
_get_driver_metadata_item(driver, "DMD_CREATIONOPTIONLIST")
)
layer_option_names = _parse_options_names(
_get_driver_metadata_item(driver, "DS_LAYER_CREATIONOPTIONLIST")
)
for k, v in kwargs.items():
if k in dataset_option_names:
dataset_kwargs[k] = v
elif k in layer_option_names:
layer_kwargs[k] = v
else:
raise ValueError(f"unrecognized option '{k}' for driver '{driver}'")
return dataset_kwargs, layer_kwargs
def _get_write_path_driver(path, driver, append=False):
"""Validate and return path and driver.
Parameters
----------
path : str or io.BytesIO
path to output file on writeable file system or an io.BytesIO object to
allow writing to memory. Will raise NotImplementedError if an open file
handle is passed.
driver : str, optional (default: None)
The OGR format driver used to write the vector file. By default attempts
to infer driver from path. Must be provided to write to a file-like
object.
append : bool, optional (default: False)
True if path and driver is being tested for append support
Returns
-------
(path, driver)
"""
if isinstance(path, BytesIO):
if driver is None:
raise ValueError("driver must be provided to write to in-memory file")
# blacklist certain drivers known not to work in current memory implementation
# because they create multiple files
if driver in {"ESRI Shapefile", "OpenFileGDB"}:
raise ValueError(f"writing to in-memory file is not supported for {driver}")
# verify that driver supports VSI methods
if not ogr_driver_supports_vsi(driver):
raise DataSourceError(
f"{driver} does not support ability to write in-memory in GDAL "
f"{get_gdal_version_string()}"
)
if append:
raise NotImplementedError("append is not supported for in-memory files")
elif hasattr(path, "write") and not isinstance(path, Path):
raise NotImplementedError(
"writing to an open file handle is not yet supported; instead, write to a "
"BytesIO instance and then read bytes from that to write to the file handle"
)
else:
path = vsi_path(path)
if driver is None:
driver = detect_write_driver(path)
# verify that driver supports writing
if not ogr_driver_supports_write(driver):
raise DataSourceError(
f"{driver} does not support write functionality in GDAL "
f"{get_gdal_version_string()}"
)
# prevent segfault from: https://github.com/OSGeo/gdal/issues/5739
if append and driver == "FlatGeobuf" and get_gdal_version() <= (3, 5, 0):
raise RuntimeError(
"append to FlatGeobuf is not supported for GDAL <= 3.5.0 due to segfault"
)
return path, driver
def write(
path,
geometry,
@@ -459,41 +607,99 @@ def write(
gdal_tz_offsets=None,
**kwargs,
):
"""Write geometry and field data to an OGR file format.
Parameters
----------
path : str or io.BytesIO
path to output file on writeable file system or an io.BytesIO object to
allow writing to memory. Will raise NotImplementedError if an open file
handle is passed; use BytesIO instead.
NOTE: support for writing to memory is limited to specific drivers.
geometry : ndarray of WKB encoded geometries or None
If None, geometries will not be written to output file
field_data : list-like of shape (num_fields, num_records)
contains one record per field to be written in same order as fields
fields : list-like
contains field names
field_mask : list-like of ndarrays or None, optional (default: None)
contains mask arrays indicating null values of the field at the same
position in the outer list, or None to indicate field does not have
a mask array
layer : str, optional (default: None)
layer name to create. If writing to memory and layer name is not
provided, it layer name will be set to a UUID4 value.
driver : string, optional (default: None)
The OGR format driver used to write the vector file. By default attempts
to infer driver from path. Must be provided to write to memory.
geometry_type : str, optional (default: None)
Possible values are: "Unknown", "Point", "LineString", "Polygon",
"MultiPoint", "MultiLineString", "MultiPolygon" or "GeometryCollection".
This parameter does not modify the geometry, but it will try to force
the layer type of the output file to this value. Use this parameter with
caution because using a wrong layer geometry type may result in errors
when writing the file, may be ignored by the driver, or may result in
invalid files.
crs : str, optional (default: None)
WKT-encoded CRS of the geometries to be written.
encoding : str, optional (default: None)
If present, will be used as the encoding for writing string values to
the file. Use with caution, only certain drivers support encodings
other than UTF-8.
promote_to_multi : bool, optional (default: None)
If True, will convert singular geometry types in the data to their
corresponding multi geometry type for writing. By default, will convert
mixed singular and multi geometry types to multi geometry types for
drivers that do not support mixed singular and multi geometry types. If
False, geometry types will not be promoted, which may result in errors
or invalid files when attempting to write mixed singular and multi
geometry types to drivers that do not support such combinations.
nan_as_null : bool, default True
For floating point columns (float32 / float64), whether NaN values are
written as "null" (missing value). Defaults to True because in pandas
NaNs are typically used as missing value. Note that when set to False,
behaviour is format specific: some formats don't support NaNs by
default (e.g. GeoJSON will skip this property) or might treat them as
null anyway (e.g. GeoPackage).
append : bool, optional (default: False)
If True, the data source specified by path already exists, and the
driver supports appending to an existing data source, will cause the
data to be appended to the existing records in the data source. Not
supported for writing to in-memory files.
NOTE: append support is limited to specific drivers and GDAL versions.
dataset_metadata : dict, optional (default: None)
Metadata to be stored at the dataset level in the output file; limited
to drivers that support writing metadata, such as GPKG, and silently
ignored otherwise. Keys and values must be strings.
layer_metadata : dict, optional (default: None)
Metadata to be stored at the layer level in the output file; limited to
drivers that support writing metadata, such as GPKG, and silently
ignored otherwise. Keys and values must be strings.
metadata : dict, optional (default: None)
alias of layer_metadata
dataset_options : dict, optional
Dataset creation options (format specific) passed to OGR. Specify as
a key-value dictionary.
layer_options : dict, optional
Layer creation options (format specific) passed to OGR. Specify as
a key-value dictionary.
gdal_tz_offsets : dict, optional (default: None)
Used to handle GDAL timezone offsets for each field contained in dict.
**kwargs
Additional driver-specific dataset creation options passed to OGR. Invalid
options will trigger a warning.
"""
# if dtypes is given, remove it from kwargs (dtypes is included in meta returned by
# read, and it is convenient to pass meta directly into write for round trip tests)
kwargs.pop("dtypes", None)
path = vsi_path(str(path))
if driver is None:
driver = detect_write_driver(path)
path, driver = _get_write_path_driver(path, driver, append=append)
# verify that driver supports writing
if not ogr_driver_supports_write(driver):
raise DataSourceError(
f"{driver} does not support write functionality in GDAL "
f"{get_gdal_version_string()}"
)
# prevent segfault from: https://github.com/OSGeo/gdal/issues/5739
if append and driver == "FlatGeobuf" and get_gdal_version() <= (3, 5, 0):
raise RuntimeError(
"append to FlatGeobuf is not supported for GDAL <= 3.5.0 due to segfault"
)
if metadata is not None:
if layer_metadata is not None:
raise ValueError("Cannot pass both metadata and layer_metadata")
layer_metadata = metadata
# validate metadata types
for metadata in [dataset_metadata, layer_metadata]:
if metadata is not None:
for k, v in metadata.items():
if not isinstance(k, str):
raise ValueError(f"metadata key {k} must be a string")
if not isinstance(v, str):
raise ValueError(f"metadata value {v} must be a string")
dataset_metadata, layer_metadata = _validate_metadata(
dataset_metadata, layer_metadata, metadata
)
if geometry is not None and promote_to_multi is None:
promote_to_multi = (
@@ -505,27 +711,14 @@ def write(
warnings.warn(
"'crs' was not provided. The output dataset will not have "
"projection information defined and may not be usable in other "
"systems."
"systems.",
stacklevel=2,
)
# preprocess kwargs and split in dataset and layer creation options
dataset_kwargs = _preprocess_options_key_value(dataset_options or {})
layer_kwargs = _preprocess_options_key_value(layer_options or {})
if kwargs:
kwargs = _preprocess_options_key_value(kwargs)
dataset_option_names = _parse_options_names(
_get_driver_metadata_item(driver, "DMD_CREATIONOPTIONLIST")
)
layer_option_names = _parse_options_names(
_get_driver_metadata_item(driver, "DS_LAYER_CREATIONOPTIONLIST")
)
for k, v in kwargs.items():
if k in dataset_option_names:
dataset_kwargs[k] = v
elif k in layer_option_names:
layer_kwargs[k] = v
else:
raise ValueError(f"unrecognized option '{k}' for driver '{driver}'")
dataset_kwargs, layer_kwargs = _preprocess_options_kwargs(
driver, dataset_options, layer_options, kwargs
)
ogr_write(
path,
@@ -547,3 +740,148 @@ def write(
layer_kwargs=layer_kwargs,
gdal_tz_offsets=gdal_tz_offsets,
)
def write_arrow(
    arrow_obj,
    path,
    layer=None,
    driver=None,
    geometry_name=None,
    geometry_type=None,
    crs=None,
    encoding=None,
    append=False,
    dataset_metadata=None,
    layer_metadata=None,
    metadata=None,
    dataset_options=None,
    layer_options=None,
    **kwargs,
):
    """Write an Arrow-compatible data source to an OGR file format.

    .. _Arrow PyCapsule Protocol: https://arrow.apache.org/docs/format/CDataInterface/PyCapsuleInterface.html

    Parameters
    ----------
    arrow_obj
        The Arrow data to write. This can be any Arrow-compatible tabular data
        object that implements the `Arrow PyCapsule Protocol`_ (i.e. has an
        ``__arrow_c_stream__`` method), for example a pyarrow Table or
        RecordBatchReader.
    path : str or io.BytesIO
        path to output file on writeable file system or an io.BytesIO object to
        allow writing to memory
        NOTE: support for writing to memory is limited to specific drivers.
    layer : str, optional (default: None)
        layer name to create. If writing to memory and layer name is not
        provided, the layer name will be set to a UUID4 value.
    driver : string, optional (default: None)
        The OGR format driver used to write the vector file. By default attempts
        to infer driver from path. Must be provided to write to memory.
    geometry_name : str, optional (default: None)
        The name of the column in the input data that will be written as the
        geometry field. Will be inferred from the input data if the geometry
        column is annotated as a "geoarrow.wkb" or "ogc.wkb" extension type.
        Otherwise needs to be specified explicitly.
    geometry_type : str
        The geometry type of the written layer. Currently, this needs to be
        specified explicitly when creating a new layer with geometries.
        Possible values are: "Unknown", "Point", "LineString", "Polygon",
        "MultiPoint", "MultiLineString", "MultiPolygon" or "GeometryCollection".
        This parameter does not modify the geometry, but it will try to force the layer
        type of the output file to this value. Use this parameter with caution because
        using a wrong layer geometry type may result in errors when writing the
        file, may be ignored by the driver, or may result in invalid files.
    crs : str, optional (default: None)
        WKT-encoded CRS of the geometries to be written.
    encoding : str, optional (default: None)
        Only used for the .dbf file of ESRI Shapefiles. If not specified,
        uses the default locale.
    append : bool, optional (default: False)
        If True, the data source specified by path already exists, and the
        driver supports appending to an existing data source, will cause the
        data to be appended to the existing records in the data source. Not
        supported for writing to in-memory files.
        NOTE: append support is limited to specific drivers and GDAL versions.
    dataset_metadata : dict, optional (default: None)
        Metadata to be stored at the dataset level in the output file; limited
        to drivers that support writing metadata, such as GPKG, and silently
        ignored otherwise. Keys and values must be strings.
    layer_metadata : dict, optional (default: None)
        Metadata to be stored at the layer level in the output file; limited to
        drivers that support writing metadata, such as GPKG, and silently
        ignored otherwise. Keys and values must be strings.
    metadata : dict, optional (default: None)
        alias of layer_metadata
    dataset_options : dict, optional
        Dataset creation options (format specific) passed to OGR. Specify as
        a key-value dictionary.
    layer_options : dict, optional
        Layer creation options (format specific) passed to OGR. Specify as
        a key-value dictionary.
    **kwargs
        Additional driver-specific dataset or layer creation options passed
        to OGR. pyogrio will attempt to automatically pass those keywords
        either as dataset or as layer creation option based on the known
        options for the specific driver. Alternatively, you can use the
        explicit `dataset_options` or `layer_options` keywords to manually
        do this (for example if an option exists as both dataset and layer
        option).

    Raises
    ------
    RuntimeError
        If GDAL < 3.8 (no Arrow write API available).
    ValueError
        If the input object does not expose the Arrow PyCapsule Protocol,
        if 'promote_to_multi' is passed, or if 'geometry_type' is missing
        while a geometry column is requested.
    """
    # writing via Arrow streams requires GDAL's Arrow write API (GDAL >= 3.8)
    if not HAS_ARROW_WRITE_API:
        raise RuntimeError("GDAL>=3.8 required to write using arrow")

    # accept any object that exposes the Arrow C stream interface
    if not hasattr(arrow_obj, "__arrow_c_stream__"):
        raise ValueError(
            "The provided data is not recognized as Arrow data. The object "
            "should implement the Arrow PyCapsule Protocol (i.e. have a "
            "'__arrow_c_stream__' method)."
        )

    path, driver = _get_write_path_driver(path, driver, append=append)

    # promote_to_multi is only meaningful for the non-Arrow write path
    if "promote_to_multi" in kwargs:
        raise ValueError(
            "The 'promote_to_multi' option is not supported when writing using Arrow"
        )

    if geometry_name is not None:
        # a geometry column will be written, so the layer geometry type must be
        # given explicitly (it is not derived from the data here)
        if geometry_type is None:
            raise ValueError("'geometry_type' keyword is required")
        if crs is None:
            # TODO: does GDAL infer CRS automatically from geometry metadata?
            warnings.warn(
                "'crs' was not provided. The output dataset will not have "
                "projection information defined and may not be usable in other "
                "systems.",
                stacklevel=2,
            )

    dataset_metadata, layer_metadata = _validate_metadata(
        dataset_metadata, layer_metadata, metadata
    )

    # preprocess kwargs and split in dataset and layer creation options
    dataset_kwargs, layer_kwargs = _preprocess_options_kwargs(
        driver, dataset_options, layer_options, kwargs
    )

    ogr_write_arrow(
        path,
        layer=layer,
        driver=driver,
        arrow_obj=arrow_obj,
        geometry_type=geometry_type,
        geometry_name=geometry_name,
        crs=crs,
        encoding=encoding,
        append=append,
        dataset_metadata=dataset_metadata,
        layer_metadata=layer_metadata,
        dataset_kwargs=dataset_kwargs,
        layer_kwargs=layer_kwargs,
    )

View File

@@ -1,16 +1,26 @@
from io import BytesIO
from pathlib import Path
from zipfile import ZipFile, ZIP_DEFLATED
from zipfile import ZIP_DEFLATED, ZipFile
import pytest
import numpy as np
from pyogrio import (
__gdal_version_string__,
__version__,
list_drivers,
)
from pyogrio._compat import HAS_ARROW_API, HAS_GDAL_GEOS, HAS_SHAPELY
from pyogrio._compat import (
HAS_ARROW_API,
HAS_ARROW_WRITE_API,
HAS_GDAL_GEOS,
HAS_PYARROW,
HAS_PYPROJ,
HAS_SHAPELY,
)
from pyogrio.core import vsi_rmtree
from pyogrio.raw import read, write
import pytest
_data_dir = Path(__file__).parent.resolve() / "fixtures"
@@ -29,6 +39,15 @@ DRIVER_EXT = {driver: ext for ext, driver in DRIVERS.items()}
# extensions exercised by the parametrized round-trip tests
ALL_EXTS = [".fgb", ".geojson", ".geojsonl", ".gpkg", ".shp"]

# First feature ID (FID) assigned by each format's driver: GPKG FIDs are
# 1-based, the other tested formats are 0-based (see FID assertions in tests)
START_FID = {
    ".fgb": 0,
    ".geojson": 0,
    ".geojsonl": 0,
    ".geojsons": 0,
    ".gpkg": 1,
    ".shp": 0,
}
def pytest_report_header(config):
drivers = ", ".join(
@@ -43,8 +62,16 @@ def pytest_report_header(config):
# marks to skip tests if optional dependencies are not present
requires_arrow_api = pytest.mark.skipif(
not HAS_ARROW_API, reason="GDAL>=3.6 and pyarrow required"
requires_arrow_api = pytest.mark.skipif(not HAS_ARROW_API, reason="GDAL>=3.6 required")
requires_pyarrow_api = pytest.mark.skipif(
not HAS_ARROW_API or not HAS_PYARROW, reason="GDAL>=3.6 and pyarrow required"
)
requires_pyproj = pytest.mark.skipif(not HAS_PYPROJ, reason="pyproj required")
requires_arrow_write_api = pytest.mark.skipif(
not HAS_ARROW_WRITE_API or not HAS_PYARROW,
reason="GDAL>=3.8 required for Arrow write API",
)
requires_gdal_geos = pytest.mark.skipif(
@@ -99,20 +126,51 @@ def naturalearth_lowres_all_ext(tmp_path, naturalearth_lowres, request):
@pytest.fixture(scope="function")
def naturalearth_lowres_vsi(tmp_path, naturalearth_lowres):
"""Wrap naturalearth_lowres as a zip file for vsi tests"""
"""Wrap naturalearth_lowres as a zip file for VSI tests"""
path = tmp_path / f"{naturalearth_lowres.name}.zip"
with ZipFile(path, mode="w", compression=ZIP_DEFLATED, compresslevel=5) as out:
for ext in ["dbf", "prj", "shp", "shx"]:
for ext in ["dbf", "prj", "shp", "shx", "cpg"]:
filename = f"{naturalearth_lowres.stem}.{ext}"
out.write(naturalearth_lowres.parent / filename, filename)
return path, f"/vsizip/{path}/{naturalearth_lowres.name}"
@pytest.fixture(scope="function")
def naturalearth_lowres_vsimem(naturalearth_lowres):
    """Write naturalearth_lowres to a vsimem file for VSI tests"""
    # round-trip the on-disk fixture through read/write into GDAL's in-memory
    # ("/vsimem/") virtual filesystem as a GPKG
    meta, _, geometry, field_data = read(naturalearth_lowres)
    name = f"pyogrio_fixture_{naturalearth_lowres.stem}"
    dst_path = Path(f"/vsimem/{name}/{name}.gpkg")
    # a spatial index is not needed for these tests
    meta["spatial_index"] = False
    # NOTE(review): geometry_type forced to MultiPolygon — presumably to match
    # geometries promoted on write; confirm against write() promote_to_multi
    meta["geometry_type"] = "MultiPolygon"
    write(dst_path, geometry, field_data, layer="naturalearth_lowres", **meta)
    yield dst_path
    # teardown: remove the in-memory directory created for this fixture
    vsi_rmtree(dst_path.parent)
@pytest.fixture(scope="session")
def test_fgdb_vsi():
return f"/vsizip/{_data_dir}/test_fgdb.gdb.zip"
def line_zm_file():
return _data_dir / "line_zm.gpkg"
@pytest.fixture(scope="session")
def curve_file():
    """Path to a GPKG fixture with a Curve layer (downgraded to LineString on read)."""
    return _data_dir / "curve.gpkg"
@pytest.fixture(scope="session")
def curve_polygon_file():
    """Path to a GPKG fixture with a CurvePolygon layer (downgraded to Polygon on read)."""
    return _data_dir / "curvepolygon.gpkg"
@pytest.fixture(scope="session")
def multisurface_file():
    """Path to a GPKG fixture with a MultiSurface layer (downgraded to MultiPolygon on read)."""
    return _data_dir / "multisurface.gpkg"
@pytest.fixture(scope="session")
@@ -120,16 +178,221 @@ def test_gpkg_nulls():
return _data_dir / "test_gpkg_nulls.gpkg"
@pytest.fixture(scope="session")
def test_ogr_types_list():
return _data_dir / "test_ogr_types_list.geojson"
@pytest.fixture(scope="function")
def no_geometry_file(tmp_path):
    """Path to a GPKG file whose single layer has attribute data but no geometry."""
    # create a GPKG layer that does not include geometry
    filename = tmp_path / "test_no_geometry.gpkg"
    write(
        filename,
        layer="no_geometry",
        geometry=None,  # geometry=None writes an attribute-only (nonspatial) layer
        field_data=[np.array(["a", "b", "c"])],
        fields=["col"],
    )
    return filename
@pytest.fixture(scope="session")
def test_datetime():
return _data_dir / "test_datetime.geojson"
@pytest.fixture(scope="function")
def list_field_values_file(tmp_path):
    """Path to a GeoJSON file whose "list_int64" property holds JSON list values."""
    # Create a GeoJSON file with list values in a property
    list_geojson = """{
    "type": "FeatureCollection",
    "features": [
        {
            "type": "Feature",
            "properties": { "int64": 1, "list_int64": [0, 1] },
            "geometry": { "type": "Point", "coordinates": [0, 2] }
        },
        {
            "type": "Feature",
            "properties": { "int64": 2, "list_int64": [2, 3] },
            "geometry": { "type": "Point", "coordinates": [1, 2] }
        },
        {
            "type": "Feature",
            "properties": { "int64": 3, "list_int64": [4, 5] },
            "geometry": { "type": "Point", "coordinates": [2, 2] }
        },
        {
            "type": "Feature",
            "properties": { "int64": 4, "list_int64": [6, 7] },
            "geometry": { "type": "Point", "coordinates": [3, 2] }
        },
        {
            "type": "Feature",
            "properties": { "int64": 5, "list_int64": [8, 9] },
            "geometry": { "type": "Point", "coordinates": [4, 2] }
        }
    ]
    }"""
    filename = tmp_path / "test_ogr_types_list.geojson"
    with open(filename, "w") as f:
        _ = f.write(list_geojson)
    return filename
@pytest.fixture(scope="session")
def test_datetime_tz():
return _data_dir / "test_datetime_tz.geojson"
@pytest.fixture(scope="function")
def nested_geojson_file(tmp_path):
    """Path to a GeoJSON file with a nested object in its feature properties."""
    # create GeoJSON file with nested properties
    nested_geojson = """{
    "type": "FeatureCollection",
    "features": [
        {
            "type": "Feature",
            "geometry": {
                "type": "Point",
                "coordinates": [0, 0]
            },
            "properties": {
                "top_level": "A",
                "intermediate_level": {
                    "bottom_level": "B"
                }
            }
        }
    ]
    }"""
    filename = tmp_path / "test_nested.geojson"
    with open(filename, "w") as f:
        _ = f.write(nested_geojson)
    return filename
@pytest.fixture(scope="function")
def datetime_file(tmp_path):
    """Path to a GeoJSON file with datetimes, one with millisecond precision."""
    # create GeoJSON file with millisecond precision
    datetime_geojson = """{
    "type": "FeatureCollection",
    "features": [
        {
            "type": "Feature",
            "properties": { "col": "2020-01-01T09:00:00.123" },
            "geometry": { "type": "Point", "coordinates": [1, 1] }
        },
        {
            "type": "Feature",
            "properties": { "col": "2020-01-01T10:00:00" },
            "geometry": { "type": "Point", "coordinates": [2, 2] }
        }
    ]
    }"""
    filename = tmp_path / "test_datetime.geojson"
    with open(filename, "w") as f:
        _ = f.write(datetime_geojson)
    return filename
@pytest.fixture(scope="function")
def datetime_tz_file(tmp_path):
    """Path to a GeoJSON file with timezone-aware datetimes (-05:00 offsets)."""
    # create GeoJSON file with datetimes with timezone
    datetime_tz_geojson = """{
    "type": "FeatureCollection",
    "features": [
        {
            "type": "Feature",
            "properties": { "datetime_col": "2020-01-01T09:00:00.123-05:00" },
            "geometry": { "type": "Point", "coordinates": [1, 1] }
        },
        {
            "type": "Feature",
            "properties": { "datetime_col": "2020-01-01T10:00:00-05:00" },
            "geometry": { "type": "Point", "coordinates": [2, 2] }
        }
    ]
    }"""
    filename = tmp_path / "test_datetime_tz.geojson"
    with open(filename, "w") as f:
        f.write(datetime_tz_geojson)
    return filename
@pytest.fixture(scope="function")
def geojson_bytes(tmp_path):
    """Extracts first 3 records from naturalearth_lowres and writes to GeoJSON,
    returning bytes"""
    meta, _, geometry, field_data = read(
        _data_dir / Path("naturalearth_lowres/naturalearth_lowres.shp"), max_features=3
    )
    filename = tmp_path / "test.geojson"
    write(filename, geometry, field_data, **meta)
    # return raw bytes so tests can exercise reading from in-memory buffers
    with open(filename, "rb") as f:
        bytes_buffer = f.read()
    return bytes_buffer
@pytest.fixture(scope="function")
def geojson_filelike(tmp_path):
    """Extracts first 3 records from naturalearth_lowres and writes to GeoJSON,
    returning open file handle"""
    meta, _, geometry, field_data = read(
        _data_dir / Path("naturalearth_lowres/naturalearth_lowres.shp"), max_features=3
    )
    filename = tmp_path / "test.geojson"
    write(filename, geometry, field_data, layer="test", **meta)
    # yield (not return) so the handle stays open during the test and is
    # closed automatically by the context manager afterwards
    with open(filename, "rb") as f:
        yield f
@pytest.fixture(scope="function")
def nonseekable_bytes(tmp_path):
    """In-memory byte stream that refuses to seek, mimicking e.g. a zstandard handle."""

    class _NoSeekBuffer(BytesIO):
        # advertise the stream as non-seekable and fail any seek attempt
        def seekable(self):
            return False

        def seek(self, *args, **kwargs):
            raise OSError("cannot seek")

    # minimal single-point GeoJSON payload wrapped in the non-seekable buffer
    payload = """{
    "type": "FeatureCollection",
    "features": [
        {
            "type": "Feature",
            "properties": { },
            "geometry": { "type": "Point", "coordinates": [1, 1] }
        }
    ]
    }"""
    return _NoSeekBuffer(payload.encode("UTF-8"))
@pytest.fixture(
    scope="session",
    params=[
        # Japanese
        ("CP932", ""),
        # Chinese
        ("CP936", "中文"),
        # Central European
        ("CP1250", "Đ"),
        # Latin 1 / Western European
        ("CP1252", "ÿ"),
        # Greek
        ("CP1253", "Φ"),
        # Arabic
        ("CP1256", "ش"),
    ],
)
def encoded_text(request):
    """Return tuple with encoding name and very short sample text in that encoding

    NOTE: it was determined through testing that code pages for MS-DOS do not
    consistently work across all Python installations (in particular, fail with conda),
    but ANSI code pages appear to work properly.
    """
    # parametrized session fixture: each param is an (encoding, sample) pair
    return request.param

View File

@@ -1,13 +1,28 @@
# Test datasets
## Natural Earth lowres
## Obtaining / creating test datasets
If a test dataset can be created in code, do that instead. If it is used in a
single test, create the test dataset as part of that test. If it is used in
more than a single test, add it to `pyogrio/tests/conftest.py` instead, as a
function-scoped test fixture.
If you need to obtain 3rd party test files:
- add a section below that describes the source location and processing steps
to derive that dataset
- make sure the license is compatible with including in Pyogrio (public domain or open-source)
and record that license below
Please keep the test files no larger than necessary to use in tests.
## Included test datasets
### Natural Earth lowres
`naturalearth_lowres.shp` was copied from GeoPandas.
## FGDB test dataset
`test_fgdb.gdb.zip`
Downloaded from http://trac.osgeo.org/gdal/raw-attachment/wiki/FileGDB/test_fgdb.gdb.zip
License: public domain
### GPKG test dataset with null values
@@ -75,15 +90,19 @@ NOTE: Reading boolean values into GeoPandas using Fiona backend treats those
values as `None` and column dtype as `object`; Pyogrio treats those values as
`np.nan` and column dtype as `float64`.
### GPKG test with MultiSurface
This was extracted from https://prd-tnm.s3.amazonaws.com/StagedProducts/Hydrography/NHDPlusHR/Beta/GDB/NHDPLUS_H_0308_HU4_GDB.zip
`NHDWaterbody` layer using ogr2ogr:
```bash
ogr2ogr test_mixed_surface.gpkg NHDPLUS_H_0308_HU4_GDB.gdb NHDWaterbody -where '"NHDPlusID" = 15000300070477' -select "NHDPlusID"
```
License: same as Pyogrio
### OSM PBF test
This was downloaded from https://github.com/openstreetmap/OSM-binary/blob/master/resources/sample.pbf
License: [Open Data Commons Open Database License (ODbL)](https://opendatacommons.org/licenses/odbl/)
### Test files for geometry types that are downgraded on read
`line_zm.gpkg` was created using QGIS to digitize a LineString GPKG layer with Z and M enabled. Downgraded to LineString Z on read.
`curve.gpkg` was created using QGIS to digitize a Curve GPKG layer. Downgraded to LineString on read.
`curvepolygon.gpkg` was created using QGIS to digitize a CurvePolygon GPKG layer. Downgraded to Polygon on read.
`multisurface.gpkg` was created using QGIS to digitize a MultiSurface GPKG layer. Downgraded to MultiPolygon on read.
License: same as Pyogrio

View File

@@ -1,7 +0,0 @@
{
"type": "FeatureCollection",
"features": [
{ "type": "Feature", "properties": { "col": "2020-01-01T09:00:00.123" }, "geometry": { "type": "Point", "coordinates": [ 1.0, 1.0 ] } },
{ "type": "Feature", "properties": { "col": "2020-01-01T10:00:00" }, "geometry": { "type": "Point", "coordinates": [ 2.0, 2.0 ] } }
]
}

View File

@@ -1,8 +0,0 @@
{
"type": "FeatureCollection",
"crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } },
"features": [
{ "type": "Feature", "properties": { "datetime_col": "2020-01-01T09:00:00.123-05:00" }, "geometry": { "type": "Point", "coordinates": [ 1.0, 1.0 ] } },
{ "type": "Feature", "properties": { "datetime_col": "2020-01-01T10:00:00-05:00" }, "geometry": { "type": "Point", "coordinates": [ 2.0, 2.0 ] } }
]
}

View File

@@ -1,18 +0,0 @@
{
"type": "FeatureCollection",
"features": [
{
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [0, 0]
},
"properties": {
"top_level": "A",
"intermediate_level": {
"bottom_level": "B"
}
}
}
]
}

View File

@@ -1,12 +0,0 @@
{
"type": "FeatureCollection",
"name": "test",
"crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } },
"features": [
{ "type": "Feature", "properties": { "int64": 1, "list_int64": [ 0, 1 ] }, "geometry": { "type": "Point", "coordinates": [ 0.0, 2.0 ] } },
{ "type": "Feature", "properties": { "int64": 2, "list_int64": [ 2, 3 ] }, "geometry": { "type": "Point", "coordinates": [ 1.0, 2.0 ] } },
{ "type": "Feature", "properties": { "int64": 3, "list_int64": [ 4, 5 ] }, "geometry": { "type": "Point", "coordinates": [ 2.0, 2.0 ] } },
{ "type": "Feature", "properties": { "int64": 4, "list_int64": [ 6, 7 ] }, "geometry": { "type": "Point", "coordinates": [ 3.0, 2.0 ] } },
{ "type": "Feature", "properties": { "int64": 5, "list_int64": [ 8, 9 ] }, "geometry": { "type": "Point", "coordinates": [ 4.0, 2.0 ] } }
]
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,28 +1,35 @@
from pathlib import Path
import numpy as np
from numpy import array_equal, allclose
import pytest
from numpy import allclose, array_equal
from pyogrio import (
__gdal_version__,
__gdal_geos_version__,
__gdal_version__,
detect_write_driver,
get_gdal_config_option,
get_gdal_data_path,
list_drivers,
list_layers,
read_bounds,
read_info,
set_gdal_config_options,
get_gdal_config_option,
get_gdal_data_path,
vsi_listtree,
vsi_rmtree,
vsi_unlink,
)
from pyogrio.core import detect_write_driver
from pyogrio.errors import DataSourceError, DataLayerError
from pyogrio.tests.conftest import HAS_SHAPELY, prepare_testfile
from pyogrio._compat import GDAL_GE_38
from pyogrio._env import GDALEnv
from pyogrio.errors import DataLayerError, DataSourceError
from pyogrio.raw import read, write
from pyogrio.tests.conftest import START_FID, prepare_testfile, requires_shapely
import pytest
with GDALEnv():
# NOTE: this must be AFTER above imports, which init the GDAL and PROJ data
# search paths
from pyogrio._ogr import ogr_driver_supports_write, has_gdal_data, has_proj_data
from pyogrio._ogr import has_gdal_data, has_proj_data, ogr_driver_supports_write
try:
@@ -150,7 +157,16 @@ def test_list_drivers():
assert len(drivers) == len(expected)
def test_list_layers(naturalearth_lowres, naturalearth_lowres_vsi, test_fgdb_vsi):
def test_list_layers(
naturalearth_lowres,
naturalearth_lowres_vsi,
naturalearth_lowres_vsimem,
line_zm_file,
curve_file,
curve_polygon_file,
multisurface_file,
no_geometry_file,
):
assert array_equal(
list_layers(naturalearth_lowres), [["naturalearth_lowres", "Polygon"]]
)
@@ -159,38 +175,98 @@ def test_list_layers(naturalearth_lowres, naturalearth_lowres_vsi, test_fgdb_vsi
list_layers(naturalearth_lowres_vsi[1]), [["naturalearth_lowres", "Polygon"]]
)
assert array_equal(
list_layers(naturalearth_lowres_vsimem),
[["naturalearth_lowres", "MultiPolygon"]],
)
# Measured 3D is downgraded to plain 3D during read
# Make sure this warning is raised
with pytest.warns(
UserWarning, match=r"Measured \(M\) geometry types are not supported"
):
fgdb_layers = list_layers(test_fgdb_vsi)
# GDAL >= 3.4.0 includes 'another_relationship' layer
assert len(fgdb_layers) >= 7
assert array_equal(list_layers(line_zm_file), [["line_zm", "LineString Z"]])
# Make sure that nonspatial layer has None for geometry
assert array_equal(fgdb_layers[0], ["basetable_2", None])
# Curve / surface types are downgraded to plain types
assert array_equal(list_layers(curve_file), [["curve", "LineString"]])
assert array_equal(list_layers(curve_polygon_file), [["curvepolygon", "Polygon"]])
assert array_equal(
list_layers(multisurface_file), [["multisurface", "MultiPolygon"]]
)
# Confirm that measured 3D is downgraded to plain 3D during read
assert array_equal(fgdb_layers[3], ["test_lines", "MultiLineString Z"])
assert array_equal(fgdb_layers[6], ["test_areas", "MultiPolygon Z"])
# Make sure that nonspatial layer has None for geometry
assert array_equal(list_layers(no_geometry_file), [["no_geometry", None]])
def test_read_bounds(naturalearth_lowres):
fids, bounds = read_bounds(naturalearth_lowres)
def test_list_layers_bytes(geojson_bytes):
layers = list_layers(geojson_bytes)
assert layers.shape == (1, 2)
assert layers[0, 0] == "test"
def test_list_layers_nonseekable_bytes(nonseekable_bytes):
layers = list_layers(nonseekable_bytes)
assert layers.shape == (1, 2)
assert layers[0, 1] == "Point"
def test_list_layers_filelike(geojson_filelike):
layers = list_layers(geojson_filelike)
assert layers.shape == (1, 2)
assert layers[0, 0] == "test"
@pytest.mark.parametrize(
"testfile",
["naturalearth_lowres", "naturalearth_lowres_vsimem", "naturalearth_lowres_vsi"],
)
def test_read_bounds(testfile, request):
path = request.getfixturevalue(testfile)
path = path if not isinstance(path, tuple) else path[1]
fids, bounds = read_bounds(path)
assert fids.shape == (177,)
assert bounds.shape == (4, 177)
assert fids[0] == 0
assert fids[0] == START_FID[Path(path).suffix]
# Fiji; wraps antimeridian
assert allclose(bounds[:, 0], [-180.0, -18.28799, 180.0, -16.02088])
def test_read_bounds_bytes(geojson_bytes):
fids, bounds = read_bounds(geojson_bytes)
assert fids.shape == (3,)
assert bounds.shape == (4, 3)
assert allclose(bounds[:, 0], [-180.0, -18.28799, 180.0, -16.02088])
def test_read_bounds_nonseekable_bytes(nonseekable_bytes):
fids, bounds = read_bounds(nonseekable_bytes)
assert fids.shape == (1,)
assert bounds.shape == (4, 1)
assert allclose(bounds[:, 0], [1, 1, 1, 1])
def test_read_bounds_filelike(geojson_filelike):
fids, bounds = read_bounds(geojson_filelike)
assert fids.shape == (3,)
assert bounds.shape == (4, 3)
assert allclose(bounds[:, 0], [-180.0, -18.28799, 180.0, -16.02088])
def test_read_bounds_max_features(naturalearth_lowres):
bounds = read_bounds(naturalearth_lowres, max_features=2)[1]
assert bounds.shape == (4, 2)
def test_read_bounds_unspecified_layer_warning(data_dir):
"""Reading a multi-layer file without specifying a layer gives a warning."""
with pytest.warns(UserWarning, match="More than one layer found "):
read_bounds(data_dir / "sample.osm.pbf")
def test_read_bounds_negative_max_features(naturalearth_lowres):
with pytest.raises(ValueError, match="'max_features' must be >= 0"):
read_bounds(naturalearth_lowres, max_features=-1)
@@ -240,12 +316,9 @@ def test_read_bounds_bbox(naturalearth_lowres_all_ext):
fids, bounds = read_bounds(naturalearth_lowres_all_ext, bbox=(-85, 8, -80, 10))
assert fids.shape == (2,)
if naturalearth_lowres_all_ext.suffix == ".gpkg":
# fid in gpkg is 1-based
assert array_equal(fids, [34, 35]) # PAN, CRI
else:
# fid in other formats is 0-based
assert array_equal(fids, [33, 34]) # PAN, CRI
fids_expected = np.array([33, 34]) # PAN, CRI
fids_expected += START_FID[naturalearth_lowres_all_ext.suffix]
assert array_equal(fids, fids_expected)
assert bounds.shape == (4, 2)
assert allclose(
@@ -257,9 +330,7 @@ def test_read_bounds_bbox(naturalearth_lowres_all_ext):
)
@pytest.mark.skipif(
not HAS_SHAPELY, reason="Shapely is required for mask functionality"
)
@requires_shapely
@pytest.mark.parametrize(
"mask",
[
@@ -273,9 +344,7 @@ def test_read_bounds_mask_invalid(naturalearth_lowres, mask):
read_bounds(naturalearth_lowres, mask=mask)
@pytest.mark.skipif(
not HAS_SHAPELY, reason="Shapely is required for mask functionality"
)
@requires_shapely
def test_read_bounds_bbox_mask_invalid(naturalearth_lowres):
with pytest.raises(ValueError, match="cannot set both 'bbox' and 'mask'"):
read_bounds(
@@ -283,9 +352,7 @@ def test_read_bounds_bbox_mask_invalid(naturalearth_lowres):
)
@pytest.mark.skipif(
not HAS_SHAPELY, reason="Shapely is required for mask functionality"
)
@requires_shapely
@pytest.mark.parametrize(
"mask,expected",
[
@@ -316,12 +383,8 @@ def test_read_bounds_mask(naturalearth_lowres_all_ext, mask, expected):
fids = read_bounds(naturalearth_lowres_all_ext, mask=mask)[0]
if naturalearth_lowres_all_ext.suffix == ".gpkg":
# fid in gpkg is 1-based
assert array_equal(fids, np.array(expected) + 1)
else:
# fid in other formats is 0-based
assert array_equal(fids, expected)
fids_expected = np.array(expected) + START_FID[naturalearth_lowres_all_ext.suffix]
assert array_equal(fids, fids_expected)
@pytest.mark.skipif(
@@ -337,40 +400,87 @@ def test_read_bounds_bbox_intersects_vs_envelope_overlaps(naturalearth_lowres_al
if __gdal_geos_version__ is None:
# bboxes for CAN, RUS overlap but do not intersect geometries
assert fids.shape == (4,)
if naturalearth_lowres_all_ext.suffix == ".gpkg":
# fid in gpkg is 1-based
assert array_equal(fids, [4, 5, 19, 28]) # CAN, USA, RUS, MEX
else:
# fid in other formats is 0-based
assert array_equal(fids, [3, 4, 18, 27]) # CAN, USA, RUS, MEX
fids_expected = np.array([3, 4, 18, 27]) # CAN, USA, RUS, MEX
fids_expected += START_FID[naturalearth_lowres_all_ext.suffix]
assert array_equal(fids, fids_expected)
else:
assert fids.shape == (2,)
if naturalearth_lowres_all_ext.suffix == ".gpkg":
# fid in gpkg is 1-based
assert array_equal(fids, [5, 28]) # USA, MEX
else:
# fid in other formats is 0-based
assert array_equal(fids, [4, 27]) # USA, MEX
fids_expected = np.array([4, 27]) # USA, MEX
fids_expected += START_FID[naturalearth_lowres_all_ext.suffix]
assert array_equal(fids, fids_expected)
@pytest.mark.parametrize("naturalearth_lowres", [".shp", ".gpkg"], indirect=True)
def test_read_info(naturalearth_lowres):
meta = read_info(naturalearth_lowres)
assert meta["layer_name"] == "naturalearth_lowres"
assert meta["crs"] == "EPSG:4326"
assert meta["geometry_type"] == "Polygon"
assert meta["encoding"] == "UTF-8"
assert meta["fields"].shape == (5,)
assert meta["dtypes"].tolist() == ["int64", "object", "object", "object", "float64"]
assert meta["features"] == 177
assert allclose(meta["total_bounds"], (-180, -90, 180, 83.64513))
assert meta["driver"] == "ESRI Shapefile"
assert meta["capabilities"]["random_read"] is True
assert meta["capabilities"]["fast_set_next_by_index"] is True
assert meta["capabilities"]["fast_spatial_filter"] is False
assert meta["capabilities"]["fast_feature_count"] is True
assert meta["capabilities"]["fast_total_bounds"] is True
if naturalearth_lowres.suffix == ".gpkg":
assert meta["fid_column"] == "fid"
assert meta["geometry_name"] == "geom"
assert meta["geometry_type"] == "MultiPolygon"
assert meta["driver"] == "GPKG"
if GDAL_GE_38:
# this capability is only True for GPKG if GDAL >= 3.8
assert meta["capabilities"]["fast_set_next_by_index"] is True
elif naturalearth_lowres.suffix == ".shp":
# fid_column == "" for formats where fid is not physically stored
assert meta["fid_column"] == ""
# geometry_name == "" for formats where geometry column name cannot be
# customized
assert meta["geometry_name"] == ""
assert meta["geometry_type"] == "Polygon"
assert meta["driver"] == "ESRI Shapefile"
assert meta["capabilities"]["fast_set_next_by_index"] is True
else:
raise ValueError(f"test not implemented for ext {naturalearth_lowres.suffix}")
@pytest.mark.parametrize(
"testfile", ["naturalearth_lowres_vsimem", "naturalearth_lowres_vsi"]
)
def test_read_info_vsi(testfile, request):
path = request.getfixturevalue(testfile)
path = path if not isinstance(path, tuple) else path[1]
meta = read_info(path)
assert meta["fields"].shape == (5,)
assert meta["features"] == 177
def test_read_info_bytes(geojson_bytes):
meta = read_info(geojson_bytes)
assert meta["fields"].shape == (5,)
assert meta["features"] == 3
def test_read_info_nonseekable_bytes(nonseekable_bytes):
meta = read_info(nonseekable_bytes)
assert meta["fields"].shape == (0,)
assert meta["features"] == 1
def test_read_info_filelike(geojson_filelike):
meta = read_info(geojson_filelike)
assert meta["fields"].shape == (5,)
assert meta["features"] == 3
@pytest.mark.parametrize(
"dataset_kwargs,fields",
@@ -399,8 +509,8 @@ def test_read_info(naturalearth_lowres):
),
],
)
def test_read_info_dataset_kwargs(data_dir, dataset_kwargs, fields):
meta = read_info(data_dir / "test_nested.geojson", **dataset_kwargs)
def test_read_info_dataset_kwargs(nested_geojson_file, dataset_kwargs, fields):
meta = read_info(nested_geojson_file, **dataset_kwargs)
assert meta["fields"].tolist() == fields
@@ -440,10 +550,12 @@ def test_read_info_force_feature_count(data_dir, layer, force, expected):
[(True, (-180.0, -90.0, 180.0, 83.64513)), (False, None)],
)
def test_read_info_force_total_bounds(
tmpdir, naturalearth_lowres, force_total_bounds, expected_total_bounds
tmp_path, naturalearth_lowres, force_total_bounds, expected_total_bounds
):
# Geojson files don't hava a fast way to determine total_bounds
geojson_path = prepare_testfile(naturalearth_lowres, dst_dir=tmpdir, ext=".geojson")
geojson_path = prepare_testfile(
naturalearth_lowres, dst_dir=tmp_path, ext=".geojsonl"
)
info = read_info(geojson_path, force_total_bounds=force_total_bounds)
if expected_total_bounds is not None:
assert allclose(info["total_bounds"], expected_total_bounds)
@@ -451,8 +563,14 @@ def test_read_info_force_total_bounds(
assert info["total_bounds"] is None
def test_read_info_without_geometry(test_fgdb_vsi):
assert read_info(test_fgdb_vsi)["total_bounds"] is None
def test_read_info_unspecified_layer_warning(data_dir):
"""Reading a multi-layer file without specifying a layer gives a warning."""
with pytest.warns(UserWarning, match="More than one layer found "):
read_info(data_dir / "sample.osm.pbf")
def test_read_info_without_geometry(no_geometry_file):
assert read_info(no_geometry_file)["total_bounds"] is None
@pytest.mark.parametrize(
@@ -494,3 +612,67 @@ def test_error_handling_warning(capfd, naturalearth_lowres):
read_info(naturalearth_lowres, INVALID="YES")
assert capfd.readouterr().err == ""
def test_vsimem_listtree_rmtree_unlink(naturalearth_lowres):
"""Test all basic functionalities of file handling in /vsimem/."""
# Prepare test data in /vsimem
meta, _, geometry, field_data = read(naturalearth_lowres)
meta["spatial_index"] = False
meta["geometry_type"] = "MultiPolygon"
test_file_path = Path("/vsimem/pyogrio_test_naturalearth_lowres.gpkg")
test_dir_path = Path(f"/vsimem/pyogrio_dir_test/{naturalearth_lowres.stem}.gpkg")
write(test_file_path, geometry, field_data, **meta)
write(test_dir_path, geometry, field_data, **meta)
# Check if everything was created properly with listtree
files = vsi_listtree("/vsimem/")
assert test_file_path.as_posix() in files
assert test_dir_path.as_posix() in files
# Check listtree with pattern
files = vsi_listtree("/vsimem/", pattern="pyogrio_dir_test*.gpkg")
assert test_file_path.as_posix() not in files
assert test_dir_path.as_posix() in files
files = vsi_listtree("/vsimem/", pattern="pyogrio_test*.gpkg")
assert test_file_path.as_posix() in files
assert test_dir_path.as_posix() not in files
# Remove test_dir and its contents
vsi_rmtree(test_dir_path.parent)
files = vsi_listtree("/vsimem/")
assert test_file_path.as_posix() in files
assert test_dir_path.as_posix() not in files
# Remove test_file
vsi_unlink(test_file_path)
def test_vsimem_rmtree_error(naturalearth_lowres_vsimem):
with pytest.raises(NotADirectoryError, match="Path is not a directory"):
vsi_rmtree(naturalearth_lowres_vsimem)
with pytest.raises(FileNotFoundError, match="Path does not exist"):
vsi_rmtree("/vsimem/non-existent")
with pytest.raises(
OSError, match="path to in-memory file or directory is required"
):
vsi_rmtree("/vsimem")
with pytest.raises(
OSError, match="path to in-memory file or directory is required"
):
vsi_rmtree("/vsimem/")
# Verify that naturalearth_lowres_vsimem still exists.
assert naturalearth_lowres_vsimem.as_posix() in vsi_listtree("/vsimem")
def test_vsimem_unlink_error(naturalearth_lowres_vsimem):
with pytest.raises(IsADirectoryError, match="Path is a directory"):
vsi_unlink(naturalearth_lowres_vsimem.parent)
with pytest.raises(FileNotFoundError, match="Path does not exist"):
vsi_unlink("/vsimem/non-existent.gpkg")

View File

@@ -1,15 +1,17 @@
import os
import contextlib
from zipfile import ZipFile, ZIP_DEFLATED
import pytest
import os
from pathlib import Path
from zipfile import ZIP_DEFLATED, ZipFile
import pyogrio
import pyogrio.raw
from pyogrio.util import vsi_path
from pyogrio._compat import HAS_PYPROJ
from pyogrio.util import get_vsi_path_or_buffer, vsi_path
import pytest
try:
import geopandas # NOQA
import geopandas # noqa: F401
has_geopandas = True
except ImportError:
@@ -31,9 +33,11 @@ def change_cwd(path):
[
# local file paths that should be passed through as is
("data.gpkg", "data.gpkg"),
(Path("data.gpkg"), "data.gpkg"),
("/home/user/data.gpkg", "/home/user/data.gpkg"),
(r"C:\User\Documents\data.gpkg", r"C:\User\Documents\data.gpkg"),
("file:///home/user/data.gpkg", "/home/user/data.gpkg"),
("/home/folder # with hash/data.gpkg", "/home/folder # with hash/data.gpkg"),
# cloud URIs
("https://testing/data.gpkg", "/vsicurl/https://testing/data.gpkg"),
("s3://testing/data.gpkg", "/vsis3/testing/data.gpkg"),
@@ -82,6 +86,8 @@ def change_cwd(path):
"s3://testing/test.zip!a/b/item.shp",
"/vsizip/vsis3/testing/test.zip/a/b/item.shp",
),
("/vsimem/data.gpkg", "/vsimem/data.gpkg"),
(Path("/vsimem/data.gpkg"), "/vsimem/data.gpkg"),
],
)
def test_vsi_path(path, expected):
@@ -236,6 +242,9 @@ def test_detect_zip_path(tmp_path, naturalearth_lowres):
path = tmp_path / "test.zip"
with ZipFile(path, mode="w", compression=ZIP_DEFLATED, compresslevel=5) as out:
for ext in ["dbf", "prj", "shp", "shx"]:
if not HAS_PYPROJ and ext == "prj":
continue
filename = f"test1.{ext}"
out.write(tmp_path / filename, filename)
@@ -265,7 +274,7 @@ def test_detect_zip_path(tmp_path, naturalearth_lowres):
@pytest.mark.network
def test_url():
url = "https://raw.githubusercontent.com/geopandas/pyogrio/main/pyogrio/tests/fixtures/naturalearth_lowres/naturalearth_lowres.shp" # NOQA
url = "https://raw.githubusercontent.com/geopandas/pyogrio/main/pyogrio/tests/fixtures/naturalearth_lowres/naturalearth_lowres.shp"
result = pyogrio.raw.read(url)
assert len(result[2]) == 177
@@ -277,9 +286,10 @@ def test_url():
assert len(result[0]) == 177
@pytest.mark.network
@pytest.mark.skipif(not has_geopandas, reason="GeoPandas not available")
def test_url_dataframe():
url = "https://raw.githubusercontent.com/geopandas/pyogrio/main/pyogrio/tests/fixtures/naturalearth_lowres/naturalearth_lowres.shp" # NOQA
url = "https://raw.githubusercontent.com/geopandas/pyogrio/main/pyogrio/tests/fixtures/naturalearth_lowres/naturalearth_lowres.shp"
assert len(pyogrio.read_dataframe(url)) == 177
@@ -330,3 +340,25 @@ def test_uri_s3(aws_env_setup):
def test_uri_s3_dataframe(aws_env_setup):
df = pyogrio.read_dataframe("zip+s3://fiona-testing/coutwildrnp.zip")
assert len(df) == 67
@pytest.mark.parametrize(
"path, expected",
[
(Path("/tmp/test.gpkg"), str(Path("/tmp/test.gpkg"))),
(Path("/vsimem/test.gpkg"), "/vsimem/test.gpkg"),
],
)
def test_get_vsi_path_or_buffer_obj_to_string(path, expected):
"""Verify that get_vsi_path_or_buffer retains forward slashes in /vsimem paths.
The /vsimem paths should keep forward slashes for GDAL to recognize them as such.
However, on Windows systems, forward slashes are by default replaced by backslashes,
so this test verifies that this doesn't happen for /vsimem paths.
"""
assert get_vsi_path_or_buffer(path) == expected
def test_get_vsi_path_or_buffer_fixtures_to_string(tmp_path):
path = tmp_path / "test.gpkg"
assert get_vsi_path_or_buffer(path) == str(path)

View File

@@ -1,29 +1,36 @@
import contextlib
import ctypes
import json
import os
import sys
from io import BytesIO
from zipfile import ZipFile
import numpy as np
from numpy import array_equal
import pytest
import pyogrio
from pyogrio import (
list_layers,
__gdal_version__,
get_gdal_config_option,
list_drivers,
list_layers,
read_info,
set_gdal_config_options,
__gdal_version__,
)
from pyogrio._compat import HAS_SHAPELY
from pyogrio.raw import read, write
from pyogrio.errors import DataSourceError, DataLayerError, FeatureError
from pyogrio._compat import HAS_PYARROW, HAS_SHAPELY
from pyogrio.errors import DataLayerError, DataSourceError, FeatureError
from pyogrio.raw import open_arrow, read, write
from pyogrio.tests.conftest import (
DRIVERS,
DRIVER_EXT,
DRIVERS,
prepare_testfile,
requires_arrow_api,
requires_pyarrow_api,
requires_shapely,
)
import pytest
try:
import shapely
except ImportError:
@@ -79,6 +86,12 @@ def test_read_autodetect_driver(tmp_path, naturalearth_lowres, ext):
assert len(geometry) == len(fields[0])
def test_read_arrow_unspecified_layer_warning(data_dir):
"""Reading a multi-layer file without specifying a layer gives a warning."""
with pytest.warns(UserWarning, match="More than one layer found "):
read(data_dir / "sample.osm.pbf")
def test_read_invalid_layer(naturalearth_lowres):
with pytest.raises(DataLayerError, match="Layer 'invalid' could not be opened"):
read(naturalearth_lowres, layer="invalid")
@@ -106,6 +119,29 @@ def test_read_no_geometry(naturalearth_lowres):
assert geometry is None
@requires_shapely
def test_read_no_geometry__mask(naturalearth_lowres):
geometry, fields = read(
naturalearth_lowres,
read_geometry=False,
mask=shapely.Point(-105, 55),
)[2:]
assert np.array_equal(fields[3], ["CAN"])
assert geometry is None
def test_read_no_geometry__bbox(naturalearth_lowres):
geometry, fields = read(
naturalearth_lowres,
read_geometry=False,
bbox=(-109.0, 55.0, -109.0, 55.0),
)[2:]
assert np.array_equal(fields[3], ["CAN"])
assert geometry is None
def test_read_no_geometry_no_columns_no_fids(naturalearth_lowres):
with pytest.raises(
ValueError,
@@ -245,9 +281,7 @@ def test_read_bbox_where(naturalearth_lowres_all_ext):
assert np.array_equal(fields[3], ["CAN"])
@pytest.mark.skipif(
not HAS_SHAPELY, reason="Shapely is required for mask functionality"
)
@requires_shapely
@pytest.mark.parametrize(
"mask",
[
@@ -261,17 +295,13 @@ def test_read_mask_invalid(naturalearth_lowres, mask):
read(naturalearth_lowres, mask=mask)
@pytest.mark.skipif(
not HAS_SHAPELY, reason="Shapely is required for mask functionality"
)
@requires_shapely
def test_read_bbox_mask_invalid(naturalearth_lowres):
with pytest.raises(ValueError, match="cannot set both 'bbox' and 'mask'"):
read(naturalearth_lowres, bbox=(-85, 8, -80, 10), mask=shapely.Point(-105, 55))
@pytest.mark.skipif(
not HAS_SHAPELY, reason="Shapely is required for mask functionality"
)
@requires_shapely
@pytest.mark.parametrize(
"mask,expected",
[
@@ -306,9 +336,7 @@ def test_read_mask(naturalearth_lowres_all_ext, mask, expected):
assert len(geometry) == len(expected)
@pytest.mark.skipif(
not HAS_SHAPELY, reason="Shapely is required for mask functionality"
)
@requires_shapely
def test_read_mask_sql(naturalearth_lowres_all_ext):
fields = read(
naturalearth_lowres_all_ext,
@@ -319,9 +347,7 @@ def test_read_mask_sql(naturalearth_lowres_all_ext):
assert np.array_equal(fields[3], ["CAN"])
@pytest.mark.skipif(
not HAS_SHAPELY, reason="Shapely is required for mask functionality"
)
@requires_shapely
def test_read_mask_where(naturalearth_lowres_all_ext):
fields = read(
naturalearth_lowres_all_ext,
@@ -414,35 +440,43 @@ def test_read_return_only_fids(naturalearth_lowres):
assert len(field_data) == 0
def test_write(tmpdir, naturalearth_lowres):
@pytest.mark.parametrize("encoding", [None, "ISO-8859-1"])
def test_write_shp(tmp_path, naturalearth_lowres, encoding):
meta, _, geometry, field_data = read(naturalearth_lowres)
filename = os.path.join(str(tmpdir), "test.shp")
filename = tmp_path / "test.shp"
meta["encoding"] = encoding
write(filename, geometry, field_data, **meta)
assert os.path.exists(filename)
assert filename.exists()
for ext in (".dbf", ".prj"):
assert os.path.exists(filename.replace(".shp", ext))
assert filename.with_suffix(ext).exists()
# We write shapefiles in UTF-8 by default on all platforms
expected_encoding = encoding if encoding is not None else "UTF-8"
with open(filename.with_suffix(".cpg")) as cpg_file:
result_encoding = cpg_file.read()
assert result_encoding == expected_encoding
def test_write_gpkg(tmpdir, naturalearth_lowres):
def test_write_gpkg(tmp_path, naturalearth_lowres):
meta, _, geometry, field_data = read(naturalearth_lowres)
meta.update({"geometry_type": "MultiPolygon"})
filename = os.path.join(str(tmpdir), "test.gpkg")
filename = tmp_path / "test.gpkg"
write(filename, geometry, field_data, driver="GPKG", **meta)
assert os.path.exists(filename)
assert filename.exists()
def test_write_gpkg_multiple_layers(tmpdir, naturalearth_lowres):
def test_write_gpkg_multiple_layers(tmp_path, naturalearth_lowres):
meta, _, geometry, field_data = read(naturalearth_lowres)
meta["geometry_type"] = "MultiPolygon"
filename = os.path.join(str(tmpdir), "test.gpkg")
filename = tmp_path / "test.gpkg"
write(filename, geometry, field_data, driver="GPKG", layer="first", **meta)
assert os.path.exists(filename)
assert filename.exists()
assert np.array_equal(list_layers(filename), [["first", "MultiPolygon"]])
@@ -453,13 +487,13 @@ def test_write_gpkg_multiple_layers(tmpdir, naturalearth_lowres):
)
def test_write_geojson(tmpdir, naturalearth_lowres):
def test_write_geojson(tmp_path, naturalearth_lowres):
meta, _, geometry, field_data = read(naturalearth_lowres)
filename = os.path.join(str(tmpdir), "test.json")
filename = tmp_path / "test.json"
write(filename, geometry, field_data, driver="GeoJSON", **meta)
assert os.path.exists(filename)
assert filename.exists()
data = json.loads(open(filename).read())
@@ -478,17 +512,21 @@ def test_write_no_fields(tmp_path, naturalearth_lowres):
meta, _, geometry, field_data = read(naturalearth_lowres)
field_data = None
meta["fields"] = None
# naturalearth_lowres actually contains MultiPolygons. A shapefile doesn't make the
# distinction, so the metadata just reports Polygon. GPKG does, so override here to
# avoid GDAL warnings.
meta["geometry_type"] = "MultiPolygon"
# Test
filename = tmp_path / "test.gpkg"
write(filename, geometry, field_data, driver="GPKG", **meta)
# Check result
assert os.path.exists(filename)
assert filename.exists()
meta, _, geometry, fields = read(filename)
assert meta["crs"] == "EPSG:4326"
assert meta["geometry_type"] == "Polygon"
assert meta["geometry_type"] == "MultiPolygon"
assert meta["encoding"] == "UTF-8"
assert meta["fields"].shape == (0,)
assert len(fields) == 0
@@ -510,7 +548,7 @@ def test_write_no_geom(tmp_path, naturalearth_lowres):
write(filename, geometry, field_data, driver="GPKG", **meta)
# Check result
assert os.path.exists(filename)
assert filename.exists()
meta, _, geometry, fields = read(filename)
assert meta["crs"] is None
@@ -547,7 +585,7 @@ def test_write_no_geom_data(tmp_path, naturalearth_lowres):
write(filename, geometry, field_data, driver="GPKG", **meta)
# Check result
assert os.path.exists(filename)
assert filename.exists()
result_meta, _, result_geometry, result_field_data = read(filename)
assert result_meta["crs"] is None
@@ -581,17 +619,84 @@ def test_write_no_geom_no_fields():
__gdal_version__ < (3, 6, 0),
reason="OpenFileGDB write support only available for GDAL >= 3.6.0",
)
def test_write_openfilegdb(tmpdir, naturalearth_lowres):
meta, _, geometry, field_data = read(naturalearth_lowres)
@pytest.mark.parametrize(
"write_int64",
[
False,
pytest.param(
True,
marks=pytest.mark.skipif(
__gdal_version__ < (3, 9, 0),
reason="OpenFileGDB write support for int64 values for GDAL >= 3.9.0",
),
),
],
)
def test_write_openfilegdb(tmp_path, write_int64):
# Point(0, 0)
expected_geometry = np.array(
[bytes.fromhex("010100000000000000000000000000000000000000")] * 3, dtype=object
)
expected_field_data = [
np.array([True, False, True], dtype="bool"),
np.array([1, 2, 3], dtype="int16"),
np.array([1, 2, 3], dtype="int32"),
np.array([1, 2, 3], dtype="int64"),
np.array([1, 2, 3], dtype="float32"),
np.array([1, 2, 3], dtype="float64"),
]
expected_fields = ["bool", "int16", "int32", "int64", "float32", "float64"]
expected_meta = {
"geometry_type": "Point",
"crs": "EPSG:4326",
"fields": expected_fields,
}
filename = os.path.join(str(tmpdir), "test.gdb")
write(filename, geometry, field_data, driver="OpenFileGDB", **meta)
filename = tmp_path / "test.gdb"
assert os.path.exists(filename)
# int64 is not supported without additional config: https://gdal.org/en/latest/drivers/vector/openfilegdb.html#bit-integer-field-support
# it is converted to float64 by default and raises a warning
# (for GDAL >= 3.9.0 only)
write_params = (
{"TARGET_ARCGIS_VERSION": "ARCGIS_PRO_3_2_OR_LATER"} if write_int64 else {}
)
if write_int64 or __gdal_version__ < (3, 9, 0):
ctx = contextlib.nullcontext()
else:
ctx = pytest.warns(
RuntimeWarning, match="Integer64 will be written as a Float64"
)
with ctx:
write(
filename,
expected_geometry,
expected_field_data,
driver="OpenFileGDB",
**expected_meta,
**write_params,
)
meta, _, geometry, field_data = read(filename)
if not write_int64:
expected_field_data[3] = expected_field_data[3].astype("float64")
# bool types are converted to int32
expected_field_data[0] = expected_field_data[0].astype("int32")
assert meta["crs"] == expected_meta["crs"]
assert np.array_equal(meta["fields"], expected_meta["fields"])
assert np.array_equal(geometry, expected_geometry)
for i in range(len(expected_field_data)):
assert field_data[i].dtype == expected_field_data[i].dtype
assert np.array_equal(field_data[i], expected_field_data[i])
@pytest.mark.parametrize("ext", DRIVERS)
def test_write_append(tmpdir, naturalearth_lowres, ext):
def test_write_append(tmp_path, naturalearth_lowres, ext):
if ext == ".fgb" and __gdal_version__ <= (3, 5, 0):
pytest.skip("Append to FlatGeobuf fails for GDAL <= 3.5.0")
@@ -603,10 +708,10 @@ def test_write_append(tmpdir, naturalearth_lowres, ext):
# coerce output layer to MultiPolygon to avoid mixed type errors
meta["geometry_type"] = "MultiPolygon"
filename = os.path.join(str(tmpdir), f"test{ext}")
filename = tmp_path / f"test{ext}"
write(filename, geometry, field_data, **meta)
assert os.path.exists(filename)
assert filename.exists()
assert read_info(filename)["features"] == 177
@@ -617,17 +722,17 @@ def test_write_append(tmpdir, naturalearth_lowres, ext):
@pytest.mark.parametrize("driver,ext", [("GML", ".gml"), ("GeoJSONSeq", ".geojsons")])
def test_write_append_unsupported(tmpdir, naturalearth_lowres, driver, ext):
def test_write_append_unsupported(tmp_path, naturalearth_lowres, driver, ext):
if ext == ".geojsons" and __gdal_version__ >= (3, 6, 0):
pytest.skip("Append to GeoJSONSeq supported for GDAL >= 3.6.0")
meta, _, geometry, field_data = read(naturalearth_lowres)
# GML does not support append functionality
filename = os.path.join(str(tmpdir), f"test{ext}")
filename = tmp_path / f"test{ext}"
write(filename, geometry, field_data, driver=driver, **meta)
assert os.path.exists(filename)
assert filename.exists()
assert read_info(filename, force_feature_count=True)["features"] == 177
@@ -639,16 +744,16 @@ def test_write_append_unsupported(tmpdir, naturalearth_lowres, driver, ext):
__gdal_version__ > (3, 5, 0),
reason="segfaults on FlatGeobuf limited to GDAL <= 3.5.0",
)
def test_write_append_prevent_gdal_segfault(tmpdir, naturalearth_lowres):
def test_write_append_prevent_gdal_segfault(tmp_path, naturalearth_lowres):
"""GDAL <= 3.5.0 segfaults when appending to FlatGeobuf; this test
verifies that we catch that before segfault"""
meta, _, geometry, field_data = read(naturalearth_lowres)
meta["geometry_type"] = "MultiPolygon"
filename = os.path.join(str(tmpdir), "test.fgb")
filename = tmp_path / "test.fgb"
write(filename, geometry, field_data, **meta)
assert os.path.exists(filename)
assert filename.exists()
with pytest.raises(
RuntimeError, # match="append to FlatGeobuf is not supported for GDAL <= 3.5.0"
@@ -664,7 +769,7 @@ def test_write_append_prevent_gdal_segfault(tmpdir, naturalearth_lowres):
if driver not in ("ESRI Shapefile", "GPKG", "GeoJSON")
},
)
def test_write_supported(tmpdir, naturalearth_lowres, driver):
def test_write_supported(tmp_path, naturalearth_lowres, driver):
"""Test drivers known to work that are not specifically tested above"""
meta, _, geometry, field_data = read(naturalearth_lowres, columns=["iso_a3"])
@@ -673,7 +778,7 @@ def test_write_supported(tmpdir, naturalearth_lowres, driver):
# we take the first record only.
meta["geometry_type"] = "MultiPolygon"
filename = tmpdir / f"test{DRIVER_EXT[driver]}"
filename = tmp_path / f"test{DRIVER_EXT[driver]}"
write(
filename,
geometry[:1],
@@ -688,10 +793,10 @@ def test_write_supported(tmpdir, naturalearth_lowres, driver):
@pytest.mark.skipif(
__gdal_version__ >= (3, 6, 0), reason="OpenFileGDB supports write for GDAL >= 3.6.0"
)
def test_write_unsupported(tmpdir, naturalearth_lowres):
def test_write_unsupported(tmp_path, naturalearth_lowres):
meta, _, geometry, field_data = read(naturalearth_lowres)
filename = os.path.join(str(tmpdir), "test.gdb")
filename = tmp_path / "test.gdb"
with pytest.raises(DataSourceError, match="does not support write functionality"):
write(filename, geometry, field_data, driver="OpenFileGDB", **meta)
@@ -721,7 +826,7 @@ def assert_equal_result(result1, result2):
assert np.array_equal(meta1["fields"], meta2["fields"])
assert np.array_equal(index1, index2)
assert all([np.array_equal(f1, f2) for f1, f2 in zip(field_data1, field_data2)])
assert all(np.array_equal(f1, f2) for f1, f2 in zip(field_data1, field_data2))
if HAS_SHAPELY:
# a plain `assert np.array_equal(geometry1, geometry2)` doesn't work
@@ -734,10 +839,10 @@ def assert_equal_result(result1, result2):
@pytest.mark.filterwarnings("ignore:File /vsimem:RuntimeWarning") # TODO
@pytest.mark.parametrize("driver,ext", [("GeoJSON", "geojson"), ("GPKG", "gpkg")])
def test_read_from_bytes(tmpdir, naturalearth_lowres, driver, ext):
def test_read_from_bytes(tmp_path, naturalearth_lowres, driver, ext):
meta, index, geometry, field_data = read(naturalearth_lowres)
meta.update({"geometry_type": "Unknown"})
filename = os.path.join(str(tmpdir), f"test.{ext}")
filename = tmp_path / f"test.{ext}"
write(filename, geometry, field_data, driver=driver, **meta)
with open(filename, "rb") as f:
@@ -747,7 +852,7 @@ def test_read_from_bytes(tmpdir, naturalearth_lowres, driver, ext):
assert_equal_result((meta, index, geometry, field_data), result2)
def test_read_from_bytes_zipped(tmpdir, naturalearth_lowres_vsi):
def test_read_from_bytes_zipped(naturalearth_lowres_vsi):
path, vsi_path = naturalearth_lowres_vsi
meta, index, geometry, field_data = read(vsi_path)
@@ -760,10 +865,10 @@ def test_read_from_bytes_zipped(tmpdir, naturalearth_lowres_vsi):
@pytest.mark.filterwarnings("ignore:File /vsimem:RuntimeWarning") # TODO
@pytest.mark.parametrize("driver,ext", [("GeoJSON", "geojson"), ("GPKG", "gpkg")])
def test_read_from_file_like(tmpdir, naturalearth_lowres, driver, ext):
def test_read_from_file_like(tmp_path, naturalearth_lowres, driver, ext):
meta, index, geometry, field_data = read(naturalearth_lowres)
meta.update({"geometry_type": "Unknown"})
filename = os.path.join(str(tmpdir), f"test.{ext}")
filename = tmp_path / f"test.{ext}"
write(filename, geometry, field_data, driver=driver, **meta)
with open(filename, "rb") as f:
@@ -772,6 +877,12 @@ def test_read_from_file_like(tmpdir, naturalearth_lowres, driver, ext):
assert_equal_result((meta, index, geometry, field_data), result2)
def test_read_from_nonseekable_bytes(nonseekable_bytes):
meta, _, geometry, _ = read(nonseekable_bytes)
assert meta["fields"].shape == (0,)
assert len(geometry) == 1
@pytest.mark.parametrize("ext", ["gpkg", "fgb"])
def test_read_write_data_types_numeric(tmp_path, ext):
# Point(0, 0)
@@ -787,13 +898,13 @@ def test_read_write_data_types_numeric(tmp_path, ext):
np.array([1, 2, 3], dtype="float64"),
]
fields = ["bool", "int16", "int32", "int64", "float32", "float64"]
meta = dict(geometry_type="Point", crs="EPSG:4326", spatial_index=False)
meta = {"geometry_type": "Point", "crs": "EPSG:4326", "spatial_index": False}
filename = tmp_path / f"test.{ext}"
write(filename, geometry, field_data, fields, **meta)
result = read(filename)[3]
assert all([np.array_equal(f1, f2) for f1, f2 in zip(result, field_data)])
assert all([f1.dtype == f2.dtype for f1, f2 in zip(result, field_data)])
assert all(np.array_equal(f1, f2) for f1, f2 in zip(result, field_data))
assert all(f1.dtype == f2.dtype for f1, f2 in zip(result, field_data))
# other integer data types that don't roundtrip exactly
# these are generally promoted to a larger integer type except for uint64
@@ -844,7 +955,7 @@ def test_read_write_datetime(tmp_path):
geometry = np.array(
[bytes.fromhex("010100000000000000000000000000000000000000")] * 2, dtype=object
)
meta = dict(geometry_type="Point", crs="EPSG:4326", spatial_index=False)
meta = {"geometry_type": "Point", "crs": "EPSG:4326", "spatial_index": False}
filename = tmp_path / "test.gpkg"
write(filename, geometry, field_data, fields, **meta)
@@ -867,7 +978,7 @@ def test_read_write_int64_large(tmp_path, ext):
)
field_data = [np.array([1, 2192502720, -5], dtype="int64")]
fields = ["overflow_int64"]
meta = dict(geometry_type="Point", crs="EPSG:4326", spatial_index=False)
meta = {"geometry_type": "Point", "crs": "EPSG:4326", "spatial_index": False}
filename = tmp_path / f"test.{ext}"
write(filename, geometry, field_data, fields, **meta)
@@ -890,17 +1001,17 @@ def test_read_data_types_numeric_with_null(test_gpkg_nulls):
assert field.dtype == "float64"
def test_read_unsupported_types(test_ogr_types_list):
fields = read(test_ogr_types_list)[3]
def test_read_unsupported_types(list_field_values_file):
fields = read(list_field_values_file)[3]
# list field gets skipped, only integer field is read
assert len(fields) == 1
fields = read(test_ogr_types_list, columns=["int64"])[3]
fields = read(list_field_values_file, columns=["int64"])[3]
assert len(fields) == 1
def test_read_datetime_millisecond(test_datetime):
field = read(test_datetime)[3][0]
def test_read_datetime_millisecond(datetime_file):
field = read(datetime_file)[3][0]
assert field.dtype == "datetime64[ms]"
assert field[0] == np.datetime64("2020-01-01 09:00:00.123")
assert field[1] == np.datetime64("2020-01-01 10:00:00.000")
@@ -929,13 +1040,14 @@ def test_read_unsupported_ext_with_prefix(tmp_path):
assert field_data[0] == "data1"
def test_read_datetime_as_string(test_datetime_tz):
field = read(test_datetime_tz)[3][0]
def test_read_datetime_as_string(datetime_tz_file):
field = read(datetime_tz_file)[3][0]
assert field.dtype == "datetime64[ms]"
# timezone is ignored in numpy layer
assert field[0] == np.datetime64("2020-01-01 09:00:00.123")
assert field[1] == np.datetime64("2020-01-01 10:00:00.000")
field = read(test_datetime_tz, datetime_as_string=True)[3][0]
field = read(datetime_tz_file, datetime_as_string=True)[3][0]
assert field.dtype == "object"
# GDAL doesn't return strings in ISO format (yet)
assert field[0] == "2020/01/01 09:00:00.123-05"
@@ -951,7 +1063,7 @@ def test_read_write_null_geometry(tmp_path, ext):
)
field_data = [np.array([1, 2], dtype="int32")]
fields = ["col"]
meta = dict(geometry_type="Point", crs="EPSG:4326")
meta = {"geometry_type": "Point", "crs": "EPSG:4326"}
if ext == "gpkg":
meta["spatial_index"] = False
@@ -971,12 +1083,12 @@ def test_write_float_nan_null(tmp_path, dtype):
)
field_data = [np.array([1.5, np.nan], dtype=dtype)]
fields = ["col"]
meta = dict(geometry_type="Point", crs="EPSG:4326")
fname = tmp_path / "test.geojson"
meta = {"geometry_type": "Point", "crs": "EPSG:4326"}
filename = tmp_path / "test.geojson"
# default nan_as_null=True
write(fname, geometry, field_data, fields, **meta)
with open(str(fname), "r") as f:
write(filename, geometry, field_data, fields, **meta)
with open(filename) as f:
content = f.read()
assert '{ "col": null }' in content
@@ -987,14 +1099,14 @@ def test_write_float_nan_null(tmp_path, dtype):
else:
ctx = contextlib.nullcontext()
with ctx:
write(fname, geometry, field_data, fields, **meta, nan_as_null=False)
with open(str(fname), "r") as f:
write(filename, geometry, field_data, fields, **meta, nan_as_null=False)
with open(filename) as f:
content = f.read()
assert '"properties": { }' in content
# but can instruct GDAL to write NaN to json
write(
fname,
filename,
geometry,
field_data,
fields,
@@ -1002,12 +1114,12 @@ def test_write_float_nan_null(tmp_path, dtype):
nan_as_null=False,
WRITE_NON_FINITE_VALUES="YES",
)
with open(str(fname), "r") as f:
with open(filename) as f:
content = f.read()
assert '{ "col": NaN }' in content
@requires_arrow_api
@requires_pyarrow_api
@pytest.mark.skipif(
"Arrow" not in list_drivers(), reason="Arrow driver is not available"
)
@@ -1021,7 +1133,7 @@ def test_write_float_nan_null_arrow(tmp_path):
)
field_data = [np.array([1.5, np.nan], dtype="float64")]
fields = ["col"]
meta = dict(geometry_type="Point", crs="EPSG:4326")
meta = {"geometry_type": "Point", "crs": "EPSG:4326"}
fname = tmp_path / "test.arrow"
# default nan_as_null=True
@@ -1039,6 +1151,112 @@ def test_write_float_nan_null_arrow(tmp_path):
assert pc.is_nan(table["col"]).to_pylist() == [False, True]
@pytest.mark.filterwarnings("ignore:File /vsimem:RuntimeWarning")
@pytest.mark.parametrize("driver", ["GeoJSON", "GPKG"])
def test_write_memory(naturalearth_lowres, driver):
    """Write a dataset to an in-memory buffer, then read it back and compare."""
    meta, _, geometry, field_data = read(naturalearth_lowres)
    meta.update({"geometry_type": "MultiPolygon"})

    memfile = BytesIO()
    write(memfile, geometry, field_data, driver=driver, layer="test", **meta)

    # something must have been written, under the requested layer name
    assert len(memfile.getbuffer()) > 0
    assert list_layers(memfile)[0][0] == "test"

    # reading back yields equivalent fields, data, and geometry count
    result_meta, _, result_geometry, result_field_data = read(memfile)
    assert np.array_equal(result_meta["fields"], meta["fields"])
    assert np.array_equal(result_field_data, field_data)
    assert len(result_geometry) == len(geometry)
def test_write_memory_driver_required(naturalearth_lowres):
    """Writing to an in-memory file without an explicit driver must fail."""
    meta, _, geometry, field_data = read(naturalearth_lowres)

    memfile = BytesIO()
    expected = "driver must be provided to write to in-memory file"
    with pytest.raises(ValueError, match=expected):
        write(memfile, geometry, field_data, driver=None, layer="test", **meta)
@pytest.mark.parametrize("driver", ["ESRI Shapefile", "OpenFileGDB"])
def test_write_memory_unsupported_driver(naturalearth_lowres, driver):
    """Drivers that cannot target an in-memory dataset must raise a clear error."""
    if driver == "OpenFileGDB" and __gdal_version__ < (3, 6, 0):
        pytest.skip("OpenFileGDB write support only available for GDAL >= 3.6.0")

    meta, _, geometry, field_data = read(naturalearth_lowres)

    memfile = BytesIO()
    with pytest.raises(
        ValueError, match=f"writing to in-memory file is not supported for {driver}"
    ):
        write(
            memfile,
            geometry,
            field_data,
            driver=driver,
            layer="test",
            append=True,
            **meta,
        )
@pytest.mark.parametrize("driver", ["GeoJSON", "GPKG"])
def test_write_memory_append_unsupported(naturalearth_lowres, driver):
    """Appending to an in-memory dataset must raise NotImplementedError."""
    meta, _, geometry, field_data = read(naturalearth_lowres)
    meta.update({"geometry_type": "MultiPolygon"})

    memfile = BytesIO()
    with pytest.raises(
        NotImplementedError, match="append is not supported for in-memory files"
    ):
        write(
            memfile,
            geometry,
            field_data,
            driver=driver,
            layer="test",
            append=True,
            **meta,
        )
def test_write_memory_existing_unsupported(naturalearth_lowres):
    """Writing into a buffer that already holds bytes must raise."""
    meta, _, geometry, field_data = read(naturalearth_lowres)

    # a non-empty buffer signals an already-existing in-memory object
    memfile = BytesIO(b"0000")
    with pytest.raises(
        NotImplementedError,
        match="writing to existing in-memory object is not supported",
    ):
        write(memfile, geometry, field_data, driver="GeoJSON", layer="test", **meta)
def test_write_open_file_handle(tmp_path, naturalearth_lowres):
    """Verify that writing to an open file handle is not currently supported"""
    meta, _, geometry, field_data = read(naturalearth_lowres)

    expected = "writing to an open file handle is not yet supported"

    # verify it fails for regular file handle
    with pytest.raises(NotImplementedError, match=expected):
        with open(tmp_path / "test.geojson", "wb") as handle:
            write(handle, geometry, field_data, driver="GeoJSON", layer="test", **meta)

    # verify it fails for ZipFile
    with pytest.raises(NotImplementedError, match=expected):
        with ZipFile(tmp_path / "test.geojson.zip", "w") as archive:
            with archive.open("test.geojson", "w") as handle:
                write(
                    handle, geometry, field_data, driver="GeoJSON", layer="test", **meta
                )
@pytest.mark.parametrize("ext", ["fgb", "gpkg", "geojson"])
@pytest.mark.parametrize(
"read_encoding,write_encoding",
@@ -1075,7 +1293,7 @@ def test_encoding_io(tmp_path, ext, read_encoding, write_encoding):
np.array([mandarin], dtype=object),
]
fields = [arabic, cree, mandarin]
meta = dict(geometry_type="Point", crs="EPSG:4326", encoding=write_encoding)
meta = {"geometry_type": "Point", "crs": "EPSG:4326", "encoding": write_encoding}
filename = tmp_path / f"test.{ext}"
write(filename, geometry, field_data, fields, **meta)
@@ -1125,7 +1343,7 @@ def test_encoding_io_shapefile(tmp_path, read_encoding, write_encoding):
# character level) by GDAL when output to shapefile, so we have to truncate
# before writing
fields = [arabic[:5], cree[:3], mandarin]
meta = dict(geometry_type="Point", crs="EPSG:4326", encoding="UTF-8")
meta = {"geometry_type": "Point", "crs": "EPSG:4326", "encoding": "UTF-8"}
filename = tmp_path / "test.shp"
# NOTE: GDAL automatically creates a cpg file with the encoding name, which
@@ -1141,7 +1359,7 @@ def test_encoding_io_shapefile(tmp_path, read_encoding, write_encoding):
# verify that if cpg file is not present, that user-provided encoding is used,
# otherwise it defaults to ISO-8859-1
if read_encoding is not None:
os.unlink(str(filename).replace(".shp", ".cpg"))
filename.with_suffix(".cpg").unlink()
actual_meta, _, _, actual_field_data = read(filename, encoding=read_encoding)
assert np.array_equal(fields, actual_meta["fields"])
assert np.array_equal(field_data, actual_field_data)
@@ -1150,6 +1368,97 @@ def test_encoding_io_shapefile(tmp_path, read_encoding, write_encoding):
)
@pytest.mark.parametrize("ext", ["gpkg", "geojson"])
def test_non_utf8_encoding_io(tmp_path, ext, encoded_text):
    """Verify that we write non-UTF data to the data source

    IMPORTANT: this may not be valid for the data source and will likely render
    them unusable in other tools, but should successfully roundtrip unless we
    disable writing using other encodings.

    NOTE: FlatGeobuff driver cannot handle non-UTF data in GDAL >= 3.9
    """
    encoding, text = encoded_text

    # a single WKB point at the origin
    wkb_point = bytes.fromhex("010100000000000000000000000000000000000000")
    geometry = np.array([wkb_point], dtype=object)
    field_data = [np.array([text], dtype=object)]
    fields = [text]
    meta = {"geometry_type": "Point", "crs": "EPSG:4326", "encoding": encoding}

    filename = tmp_path / f"test.{ext}"
    write(filename, geometry, field_data, fields, **meta)

    # cannot open these files without specifying encoding
    with pytest.raises(UnicodeDecodeError):
        read(filename)

    with pytest.raises(UnicodeDecodeError):
        read_info(filename)

    # with the encoding supplied, field name and value roundtrip intact
    result_meta, _, _, result_field_data = read(filename, encoding=encoding)
    assert result_meta["fields"][0] == text
    assert result_field_data[0] == text
    assert read_info(filename, encoding=encoding)["fields"][0] == text
def test_non_utf8_encoding_io_shapefile(tmp_path, encoded_text):
    """Roundtrip non-UTF-8 text through a shapefile and its .cpg sidecar.

    Verifies that GDAL's auto-written .cpg file makes the encoding
    self-describing, and that once the .cpg is removed, the caller must
    supply the encoding explicitly to decode field names and values.
    """
    encoding, text = encoded_text
    # Point(0, 0)
    geometry = np.array(
        [bytes.fromhex("010100000000000000000000000000000000000000")], dtype=object
    )
    field_data = [np.array([text], dtype=object)]
    fields = [text]
    meta = {"geometry_type": "Point", "crs": "EPSG:4326", "encoding": encoding}
    filename = tmp_path / "test.shp"
    write(filename, geometry, field_data, fields, **meta)
    # NOTE: GDAL automatically creates a cpg file with the encoding name, which
    # means that if we read this without specifying the encoding it uses the
    # correct one
    actual_meta, _, _, actual_field_data = read(filename)
    assert actual_meta["fields"][0] == text
    assert actual_field_data[0] == text
    assert read_info(filename)["fields"][0] == text
    # verify that if cpg file is not present, that user-provided encoding must be used
    filename.with_suffix(".cpg").unlink()
    # We will assume ISO-8859-1, which is wrong
    miscoded = text.encode(encoding).decode("ISO-8859-1")
    bad_meta, _, _, bad_field_data = read(filename)
    assert bad_meta["fields"][0] == miscoded
    assert bad_field_data[0] == miscoded
    assert read_info(filename)["fields"][0] == miscoded
    # If encoding is provided, that should yield correct text
    actual_meta, _, _, actual_field_data = read(filename, encoding=encoding)
    assert actual_meta["fields"][0] == text
    assert actual_field_data[0] == text
    assert read_info(filename, encoding=encoding)["fields"][0] == text
    # verify that setting encoding does not corrupt SHAPE_ENCODING option if set
    # globally (it is ignored during read when encoding is specified by user)
    try:
        set_gdal_config_options({"SHAPE_ENCODING": "CP1254"})
        _ = read(filename, encoding=encoding)
        assert get_gdal_config_option("SHAPE_ENCODING") == "CP1254"
    finally:
        # reset to clear between tests
        set_gdal_config_options({"SHAPE_ENCODING": None})
def test_write_with_mask(tmp_path):
# Point(0, 0), null
geometry = np.array(
@@ -1159,7 +1468,7 @@ def test_write_with_mask(tmp_path):
field_data = [np.array([1, 2, 3], dtype="int32")]
field_mask = [np.array([False, True, False])]
fields = ["col"]
meta = dict(geometry_type="Point", crs="EPSG:4326")
meta = {"geometry_type": "Point", "crs": "EPSG:4326"}
filename = tmp_path / "test.geojson"
write(filename, geometry, field_data, fields, field_mask, **meta)
@@ -1176,3 +1485,31 @@ def test_write_with_mask(tmp_path):
field_mask = [np.array([False, True, False])] * 2
with pytest.raises(ValueError):
write(filename, geometry, field_data, fields, field_mask, **meta)
@requires_arrow_api
def test_open_arrow_capsule_protocol_without_pyarrow(naturalearth_lowres):
    """open_arrow without pyarrow yields a reader exposing a valid Arrow capsule.

    Lives here rather than in test_arrow.py so it also runs when pyarrow is
    not installed.
    """
    with open_arrow(naturalearth_lowres) as (meta, reader):
        assert isinstance(meta, dict)
        assert isinstance(reader, pyogrio._io._ArrowStream)

        capsule = reader.__arrow_c_stream__()
        is_valid = ctypes.pythonapi.PyCapsule_IsValid(
            ctypes.py_object(capsule), b"arrow_array_stream"
        )
        assert is_valid == 1
@pytest.mark.skipif(HAS_PYARROW, reason="pyarrow is installed")
@requires_arrow_api
def test_open_arrow_error_no_pyarrow(naturalearth_lowres):
    """Requesting pyarrow output without pyarrow installed raises ImportError.

    Lives here rather than in test_arrow.py so it runs when pyarrow is
    not installed.
    """
    with pytest.raises(ImportError):
        with open_arrow(naturalearth_lowres, use_pyarrow=True) as stream:
            del stream

View File

@@ -1,86 +0,0 @@
"""Run pytest tests manually on Windows due to import errors
"""
from pathlib import Path
import platform
from tempfile import TemporaryDirectory
data_dir = Path(__file__).parent.resolve() / "fixtures"
# NOTE: this script only acts on Windows; it manually invokes a subset of the
# pytest suite because of import errors when running pytest directly there.
if platform.system() == "Windows":
    naturalearth_lowres = data_dir / Path("naturalearth_lowres/naturalearth_lowres.shp")
    # VSI path into the zipped FileGDB test fixture
    test_fgdb_vsi = f"/vsizip/{data_dir}/test_fgdb.gdb.zip"

    from pyogrio.tests.test_core import test_read_info

    # each test call is wrapped in try/except so one failure does not stop the rest
    try:
        test_read_info(naturalearth_lowres)
    except Exception as ex:
        print(ex)

    from pyogrio.tests.test_raw_io import (
        test_read,
        test_read_no_geometry,
        test_read_columns,
        test_read_skip_features,
        test_read_max_features,
        test_read_where,
        test_read_where_invalid,
        test_write,
        test_write_gpkg,
        test_write_geojson,
    )

    try:
        test_read(naturalearth_lowres)
    except Exception as ex:
        print(ex)

    try:
        test_read_no_geometry(naturalearth_lowres)
    except Exception as ex:
        print(ex)

    try:
        test_read_columns(naturalearth_lowres)
    except Exception as ex:
        print(ex)

    try:
        test_read_skip_features(naturalearth_lowres)
    except Exception as ex:
        print(ex)

    try:
        test_read_max_features(naturalearth_lowres)
    except Exception as ex:
        print(ex)

    try:
        test_read_where(naturalearth_lowres)
    except Exception as ex:
        print(ex)

    try:
        test_read_where_invalid(naturalearth_lowres)
    except Exception as ex:
        print(ex)

    # write tests each get their own fresh temporary directory
    with TemporaryDirectory() as tmpdir:
        try:
            test_write(tmpdir, naturalearth_lowres)
        except Exception as ex:
            print(ex)

    with TemporaryDirectory() as tmpdir:
        try:
            test_write_gpkg(tmpdir, naturalearth_lowres)
        except Exception as ex:
            print(ex)

    with TemporaryDirectory() as tmpdir:
        try:
            test_write_geojson(tmpdir, naturalearth_lowres)
        except Exception as ex:
            print(ex)

View File

@@ -1,41 +1,65 @@
"""Utility functions."""
import re
import sys
from packaging.version import Version
from pathlib import Path
from typing import Union
from urllib.parse import urlparse
from packaging.version import Version
from pyogrio._env import GDALEnv
with GDALEnv():
from pyogrio._ogr import buffer_to_virtual_file
from pyogrio._vsi import vsimem_rmtree_toplevel as _vsimem_rmtree_toplevel
def get_vsi_path(path_or_buffer):
def get_vsi_path_or_buffer(path_or_buffer):
"""Get VSI-prefixed path or bytes buffer depending on type of path_or_buffer.
If path_or_buffer is a bytes object, it will be returned directly and will
be read into an in-memory dataset when passed to one of the Cython functions.
If path_or_buffer is a file-like object with a read method, bytes will be
read from the file-like object and returned.
Otherwise, it will be converted to a string, and parsed to prefix with
appropriate GDAL /vsi*/ prefixes.
Parameters
----------
path_or_buffer : str, pathlib.Path, bytes, or file-like
A dataset path or URI, raw buffer, or file-like object with a read method.
Returns
-------
str or bytes
"""
# treat Path objects here already to ignore their read method + to avoid backslashes
# on Windows.
if isinstance(path_or_buffer, Path):
return vsi_path(path_or_buffer)
if isinstance(path_or_buffer, bytes):
return path_or_buffer
if hasattr(path_or_buffer, "read"):
path_or_buffer = path_or_buffer.read()
bytes_buffer = path_or_buffer.read()
buffer = None
if isinstance(path_or_buffer, bytes):
buffer = path_or_buffer
ext = ""
is_zipped = path_or_buffer[:4].startswith(b"PK\x03\x04")
if is_zipped:
ext = ".zip"
path = buffer_to_virtual_file(path_or_buffer, ext=ext)
if is_zipped:
path = "/vsizip/" + path
else:
path = vsi_path(str(path_or_buffer))
# rewind buffer if possible so that subsequent operations do not need to rewind
if hasattr(path_or_buffer, "seekable") and path_or_buffer.seekable():
path_or_buffer.seek(0)
return path, buffer
return bytes_buffer
return vsi_path(str(path_or_buffer))
def vsi_path(path: str) -> str:
"""
Ensure path is a local path or a GDAL-compatible vsi path.
"""
def vsi_path(path: Union[str, Path]) -> str:
"""Ensure path is a local path or a GDAL-compatible VSI path."""
# Convert Path objects to string, but for VSI paths, keep posix style path.
if isinstance(path, Path):
if sys.platform == "win32" and path.as_posix().startswith("/vsi"):
path = path.as_posix()
else:
path = str(path)
# path is already in GDAL format
if path.startswith("/vsi"):
@@ -78,12 +102,11 @@ SCHEMES = {
# those are for now not added as supported URI
}
CURLSCHEMES = set([k for k, v in SCHEMES.items() if v == "curl"])
CURLSCHEMES = {k for k, v in SCHEMES.items() if v == "curl"}
def _parse_uri(path: str):
"""
Parse a URI
"""Parse a URI.
Returns a tuples of (path, archive, scheme)
@@ -95,7 +118,7 @@ def _parse_uri(path: str):
scheme : str
URI scheme such as "https" or "zip+s3".
"""
parts = urlparse(path)
parts = urlparse(path, allow_fragments=False)
# if the scheme is not one of GDAL's supported schemes, return raw path
if parts.scheme and not all(p in SCHEMES for p in parts.scheme.split("+")):
@@ -118,8 +141,7 @@ def _parse_uri(path: str):
def _construct_vsi_path(path, archive, scheme) -> str:
"""Convert a parsed path to a GDAL VSI path"""
"""Convert a parsed path to a GDAL VSI path."""
prefix = ""
suffix = ""
schemes = scheme.split("+")
@@ -128,9 +150,7 @@ def _construct_vsi_path(path, archive, scheme) -> str:
schemes.insert(0, "zip")
if schemes:
prefix = "/".join(
"vsi{0}".format(SCHEMES[p]) for p in schemes if p and p != "file"
)
prefix = "/".join(f"vsi{SCHEMES[p]}" for p in schemes if p and p != "file")
if schemes[-1] in CURLSCHEMES:
suffix = f"{schemes[-1]}://"
@@ -139,15 +159,15 @@ def _construct_vsi_path(path, archive, scheme) -> str:
if archive:
return "/{}/{}{}/{}".format(prefix, suffix, archive, path.lstrip("/"))
else:
return "/{}/{}{}".format(prefix, suffix, path)
return f"/{prefix}/{suffix}{path}"
return path
def _preprocess_options_key_value(options):
"""
Preprocess options, eg `spatial_index=True` gets converted
to `SPATIAL_INDEX="YES"`.
"""Preprocess options.
For example, `spatial_index=True` gets converted to `SPATIAL_INDEX="YES"`.
"""
if not isinstance(options, dict):
raise TypeError(f"Expected options to be a dict, got {type(options)}")
@@ -171,6 +191,7 @@ def _mask_to_wkb(mask):
Parameters
----------
mask : Shapely geometry
The geometry to convert to WKB.
Returns
-------
@@ -181,8 +202,8 @@ def _mask_to_wkb(mask):
ValueError
raised if Shapely >= 2.0 is not available or mask is not a Shapely
Geometry object
"""
"""
if mask is None:
return mask
@@ -201,3 +222,26 @@ def _mask_to_wkb(mask):
raise ValueError("'mask' parameter must be a Shapely geometry")
return shapely.to_wkb(mask)
def vsimem_rmtree_toplevel(path: Union[str, Path]):
    """Remove the parent directory of the file path recursively.

    This is used for final cleanup of an in-memory dataset, which may have been
    created within a directory to contain sibling files.

    Additional VSI handlers may be chained to the left of /vsimem/ in path and
    will be ignored.

    Remark: function is defined here to be able to run tests on it.

    Parameters
    ----------
    path : str or pathlib.Path
        path to in-memory file

    """
    # Path objects are normalized to posix-style strings for the VSI layer
    vsi_path_str = path.as_posix() if isinstance(path, Path) else path
    _vsimem_rmtree_toplevel(vsi_path_str)