that's too much!
This commit is contained in:
19
.venv/lib/python3.12/site-packages/fiona/fio/__init__.py
Normal file
19
.venv/lib/python3.12/site-packages/fiona/fio/__init__.py
Normal file
@@ -0,0 +1,19 @@
|
||||
"""Fiona's command line interface"""
|
||||
|
||||
from functools import wraps
|
||||
|
||||
|
||||
def with_context_env(f):
|
||||
"""Pops the Fiona Env from the passed context and executes the
|
||||
wrapped func in the context of that obj.
|
||||
|
||||
Click's pass_context decorator must precede this decorator, or else
|
||||
there will be no context in the wrapper args.
|
||||
"""
|
||||
@wraps(f)
|
||||
def wrapper(*args, **kwds):
|
||||
ctx = args[0]
|
||||
env = ctx.obj.pop('env')
|
||||
with env:
|
||||
return f(*args, **kwds)
|
||||
return wrapper
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
89
.venv/lib/python3.12/site-packages/fiona/fio/bounds.py
Normal file
89
.venv/lib/python3.12/site-packages/fiona/fio/bounds.py
Normal file
@@ -0,0 +1,89 @@
|
||||
"""$ fio bounds"""
|
||||
|
||||
import json
|
||||
|
||||
import click
|
||||
from cligj import precision_opt, use_rs_opt
|
||||
|
||||
import fiona
|
||||
from fiona.fio.helpers import obj_gen
|
||||
from fiona.fio import with_context_env
|
||||
from fiona.model import ObjectEncoder
|
||||
|
||||
|
||||
@click.command(short_help="Print the extent of GeoJSON objects")
|
||||
@precision_opt
|
||||
@click.option('--explode/--no-explode', default=False,
|
||||
help="Explode collections into features (default: no).")
|
||||
@click.option('--with-id/--without-id', default=False,
|
||||
help="Print GeoJSON ids and bounding boxes together "
|
||||
"(default: without).")
|
||||
@click.option('--with-obj/--without-obj', default=False,
|
||||
help="Print GeoJSON objects and bounding boxes together "
|
||||
"(default: without).")
|
||||
@use_rs_opt
|
||||
@click.pass_context
|
||||
@with_context_env
|
||||
def bounds(ctx, precision, explode, with_id, with_obj, use_rs):
|
||||
"""Print the bounding boxes of GeoJSON objects read from stdin.
|
||||
|
||||
Optionally explode collections and print the bounds of their
|
||||
features.
|
||||
|
||||
To print identifiers for input objects along with their bounds
|
||||
as a {id: identifier, bbox: bounds} JSON object, use --with-id.
|
||||
|
||||
To print the input objects themselves along with their bounds
|
||||
as GeoJSON object, use --with-obj. This has the effect of updating
|
||||
input objects with {id: identifier, bbox: bounds}.
|
||||
|
||||
"""
|
||||
stdin = click.get_text_stream('stdin')
|
||||
source = obj_gen(stdin)
|
||||
|
||||
for i, obj in enumerate(source):
|
||||
obj_id = obj.get("id", "collection:" + str(i))
|
||||
xs = []
|
||||
ys = []
|
||||
features = obj.get("features") or [obj]
|
||||
|
||||
for j, feat in enumerate(features):
|
||||
feat_id = feat.get("id", "feature:" + str(i))
|
||||
w, s, e, n = fiona.bounds(feat)
|
||||
|
||||
if precision > 0:
|
||||
w, s, e, n = (round(v, precision) for v in (w, s, e, n))
|
||||
if explode:
|
||||
|
||||
if with_id:
|
||||
rec = {"parent": obj_id, "id": feat_id, "bbox": (w, s, e, n)}
|
||||
elif with_obj:
|
||||
feat.update(parent=obj_id, bbox=(w, s, e, n))
|
||||
rec = feat
|
||||
else:
|
||||
rec = (w, s, e, n)
|
||||
|
||||
if use_rs:
|
||||
click.echo('\x1e', nl=False)
|
||||
|
||||
click.echo(json.dumps(rec, cls=ObjectEncoder))
|
||||
|
||||
else:
|
||||
xs.extend([w, e])
|
||||
ys.extend([s, n])
|
||||
|
||||
if not explode:
|
||||
w, s, e, n = (min(xs), min(ys), max(xs), max(ys))
|
||||
|
||||
if with_id:
|
||||
rec = {"id": obj_id, "bbox": (w, s, e, n)}
|
||||
elif with_obj:
|
||||
obj.update(id=obj_id, bbox=(w, s, e, n))
|
||||
rec = obj
|
||||
else:
|
||||
rec = (w, s, e, n)
|
||||
|
||||
if use_rs:
|
||||
click.echo("\x1e", nl=False)
|
||||
|
||||
click.echo(json.dumps(rec, cls=ObjectEncoder))
|
||||
63
.venv/lib/python3.12/site-packages/fiona/fio/calc.py
Normal file
63
.venv/lib/python3.12/site-packages/fiona/fio/calc.py
Normal file
@@ -0,0 +1,63 @@
|
||||
import json

import click
from cligj import use_rs_opt

from .helpers import obj_gen, eval_feature_expression
from fiona.fio import with_context_env
from fiona.model import ObjectEncoder


@click.command(short_help="Calculate GeoJSON property by Python expression")
@click.argument('property_name')
@click.argument('expression')
@click.option('--overwrite', is_flag=True, default=False,
              help="Overwrite properties, default: False")
@use_rs_opt
@click.pass_context
@with_context_env
def calc(ctx, property_name, expression, overwrite, use_rs):
    """
    Create a new property on GeoJSON features using the specified expression.

    \b
    The expression is evaluated in a restricted namespace containing:
        - sum, pow, min, max and the imported math module
        - shape (optional, imported from shapely.geometry if available)
        - bool, int, str, len, float type conversions
        - f (the feature to be evaluated,
             allows item access via javascript-style dot notation using munch)

    The expression will be evaluated for each feature and its
    return value will be added to the properties
    as the specified property_name. Existing properties will not
    be overwritten by default (an Exception is raised).

    Example

    \b
    $ fio cat data.shp | fio calc sumAB  "f.properties.A + f.properties.B"

    """
    # Features arrive on stdin as GeoJSON texts.
    input_stream = click.get_text_stream('stdin')

    for geojson_obj in obj_gen(input_stream):
        # A bare feature is treated as a one-feature collection.
        for feature in geojson_obj.get("features") or [geojson_obj]:
            existing = feature["properties"]
            # Refuse to clobber an existing property unless asked to.
            if property_name in existing and not overwrite:
                raise click.UsageError(
                    "{} already exists in properties; "
                    "rename or use --overwrite".format(property_name)
                )

            existing[property_name] = eval_feature_expression(feature, expression)

            if use_rs:
                # RFC 7464 record separator.
                click.echo("\x1e", nl=False)
            click.echo(json.dumps(feature, cls=ObjectEncoder))
|
||||
139
.venv/lib/python3.12/site-packages/fiona/fio/cat.py
Normal file
139
.venv/lib/python3.12/site-packages/fiona/fio/cat.py
Normal file
@@ -0,0 +1,139 @@
|
||||
"""fio-cat"""
|
||||
|
||||
import json
|
||||
import warnings
|
||||
|
||||
import click
|
||||
import cligj
|
||||
|
||||
import fiona
|
||||
from fiona.transform import transform_geom
|
||||
from fiona.model import Feature, ObjectEncoder
|
||||
from fiona.fio import options, with_context_env
|
||||
from fiona.fio.helpers import recursive_round
|
||||
from fiona.errors import AttributeFilterError
|
||||
|
||||
warnings.simplefilter("default")
|
||||
|
||||
|
||||
# Cat command
|
||||
@click.command(short_help="Concatenate and print the features of datasets")
|
||||
@click.argument("files", nargs=-1, required=True, metavar="INPUTS...")
|
||||
@click.option(
|
||||
"--layer",
|
||||
default=None,
|
||||
multiple=True,
|
||||
callback=options.cb_multilayer,
|
||||
help="Input layer(s), specified as 'fileindex:layer` "
|
||||
"For example, '1:foo,2:bar' will concatenate layer foo "
|
||||
"from file 1 and layer bar from file 2",
|
||||
)
|
||||
@cligj.precision_opt
|
||||
@cligj.indent_opt
|
||||
@cligj.compact_opt
|
||||
@click.option(
|
||||
"--ignore-errors/--no-ignore-errors",
|
||||
default=False,
|
||||
help="log errors but do not stop serialization.",
|
||||
)
|
||||
@options.dst_crs_opt
|
||||
@cligj.use_rs_opt
|
||||
@click.option(
|
||||
"--bbox",
|
||||
default=None,
|
||||
metavar="w,s,e,n",
|
||||
help="filter for features intersecting a bounding box",
|
||||
)
|
||||
@click.option(
|
||||
"--where",
|
||||
default=None,
|
||||
help="attribute filter using SQL where clause",
|
||||
)
|
||||
@click.option(
|
||||
"--cut-at-antimeridian",
|
||||
is_flag=True,
|
||||
default=False,
|
||||
help="Optionally cut geometries at the anti-meridian. To be used only for a geographic destination CRS.",
|
||||
)
|
||||
@click.option('--where', default=None,
|
||||
help="attribute filter using SQL where clause")
|
||||
@options.open_opt
|
||||
@click.pass_context
|
||||
@with_context_env
|
||||
def cat(
|
||||
ctx,
|
||||
files,
|
||||
precision,
|
||||
indent,
|
||||
compact,
|
||||
ignore_errors,
|
||||
dst_crs,
|
||||
use_rs,
|
||||
bbox,
|
||||
where,
|
||||
cut_at_antimeridian,
|
||||
layer,
|
||||
open_options,
|
||||
):
|
||||
"""
|
||||
Concatenate and print the features of input datasets as a sequence of
|
||||
GeoJSON features.
|
||||
|
||||
When working with a multi-layer dataset the first layer is used by default.
|
||||
Use the '--layer' option to select a different layer.
|
||||
|
||||
"""
|
||||
dump_kwds = {"sort_keys": True}
|
||||
if indent:
|
||||
dump_kwds["indent"] = indent
|
||||
if compact:
|
||||
dump_kwds["separators"] = (",", ":")
|
||||
|
||||
# Validate file idexes provided in --layer option
|
||||
# (can't pass the files to option callback)
|
||||
if layer:
|
||||
options.validate_multilayer_file_index(files, layer)
|
||||
|
||||
# first layer is the default
|
||||
for i in range(1, len(files) + 1):
|
||||
if str(i) not in layer.keys():
|
||||
layer[str(i)] = [0]
|
||||
|
||||
try:
|
||||
if bbox:
|
||||
try:
|
||||
bbox = tuple(map(float, bbox.split(",")))
|
||||
except ValueError:
|
||||
bbox = json.loads(bbox)
|
||||
|
||||
for i, path in enumerate(files, 1):
|
||||
for lyr in layer[str(i)]:
|
||||
with fiona.open(path, layer=lyr, **open_options) as src:
|
||||
for i, feat in src.items(bbox=bbox, where=where):
|
||||
geom = feat.geometry
|
||||
|
||||
if dst_crs:
|
||||
geom = transform_geom(
|
||||
src.crs,
|
||||
dst_crs,
|
||||
geom,
|
||||
antimeridian_cutting=cut_at_antimeridian,
|
||||
)
|
||||
|
||||
if precision >= 0:
|
||||
geom = recursive_round(geom, precision)
|
||||
|
||||
feat = Feature(
|
||||
id=feat.id,
|
||||
properties=feat.properties,
|
||||
geometry=geom,
|
||||
bbox=fiona.bounds(geom),
|
||||
)
|
||||
|
||||
if use_rs:
|
||||
click.echo("\x1e", nl=False)
|
||||
|
||||
click.echo(json.dumps(feat, cls=ObjectEncoder, **dump_kwds))
|
||||
|
||||
except AttributeFilterError as e:
|
||||
raise click.BadParameter("'where' clause is invalid: " + str(e))
|
||||
245
.venv/lib/python3.12/site-packages/fiona/fio/collect.py
Normal file
245
.venv/lib/python3.12/site-packages/fiona/fio/collect.py
Normal file
@@ -0,0 +1,245 @@
|
||||
"""fio-collect"""
|
||||
|
||||
from functools import partial
|
||||
import json
|
||||
import logging
|
||||
|
||||
import click
|
||||
import cligj
|
||||
|
||||
from fiona.fio import helpers, options, with_context_env
|
||||
from fiona.model import Geometry, ObjectEncoder
|
||||
from fiona.transform import transform_geom
|
||||
|
||||
|
||||
@click.command(short_help="Collect a sequence of features.")
|
||||
@cligj.precision_opt
|
||||
@cligj.indent_opt
|
||||
@cligj.compact_opt
|
||||
@click.option(
|
||||
"--record-buffered/--no-record-buffered",
|
||||
default=False,
|
||||
help="Economical buffering of writes at record, not collection "
|
||||
"(default), level.",
|
||||
)
|
||||
@click.option(
|
||||
"--ignore-errors/--no-ignore-errors",
|
||||
default=False,
|
||||
help="log errors but do not stop serialization.",
|
||||
)
|
||||
@options.src_crs_opt
|
||||
@click.option(
|
||||
"--with-ld-context/--without-ld-context",
|
||||
default=False,
|
||||
help="add a JSON-LD context to JSON output.",
|
||||
)
|
||||
@click.option(
|
||||
"--add-ld-context-item",
|
||||
multiple=True,
|
||||
help="map a term to a URI and add it to the output's JSON LD " "context.",
|
||||
)
|
||||
@click.option(
|
||||
"--parse/--no-parse",
|
||||
default=True,
|
||||
help="load and dump the geojson feature (default is True)",
|
||||
)
|
||||
@click.pass_context
|
||||
@with_context_env
|
||||
def collect(
|
||||
ctx,
|
||||
precision,
|
||||
indent,
|
||||
compact,
|
||||
record_buffered,
|
||||
ignore_errors,
|
||||
src_crs,
|
||||
with_ld_context,
|
||||
add_ld_context_item,
|
||||
parse,
|
||||
):
|
||||
"""Make a GeoJSON feature collection from a sequence of GeoJSON
|
||||
features and print it."""
|
||||
logger = logging.getLogger(__name__)
|
||||
stdin = click.get_text_stream("stdin")
|
||||
sink = click.get_text_stream("stdout")
|
||||
|
||||
dump_kwds = {"sort_keys": True}
|
||||
if indent:
|
||||
dump_kwds["indent"] = indent
|
||||
if compact:
|
||||
dump_kwds["separators"] = (",", ":")
|
||||
item_sep = compact and "," or ", "
|
||||
|
||||
if src_crs:
|
||||
if not parse:
|
||||
raise click.UsageError("Can't specify --src-crs with --no-parse")
|
||||
transformer = partial(
|
||||
transform_geom,
|
||||
src_crs,
|
||||
"EPSG:4326",
|
||||
antimeridian_cutting=True,
|
||||
precision=precision,
|
||||
)
|
||||
else:
|
||||
|
||||
def transformer(x):
|
||||
return x
|
||||
|
||||
first_line = next(stdin)
|
||||
|
||||
# If parsing geojson
|
||||
if parse:
|
||||
# If input is RS-delimited JSON sequence.
|
||||
if first_line.startswith("\x1e"):
|
||||
|
||||
def feature_text_gen():
|
||||
buffer = first_line.strip("\x1e")
|
||||
for line in stdin:
|
||||
if line.startswith("\x1e"):
|
||||
if buffer:
|
||||
feat = json.loads(buffer)
|
||||
feat["geometry"] = transformer(
|
||||
Geometry.from_dict(**feat["geometry"])
|
||||
)
|
||||
yield json.dumps(feat, cls=ObjectEncoder, **dump_kwds)
|
||||
buffer = line.strip("\x1e")
|
||||
else:
|
||||
buffer += line
|
||||
else:
|
||||
feat = json.loads(buffer)
|
||||
feat["geometry"] = transformer(
|
||||
Geometry.from_dict(**feat["geometry"])
|
||||
)
|
||||
yield json.dumps(feat, cls=ObjectEncoder, **dump_kwds)
|
||||
|
||||
else:
|
||||
|
||||
def feature_text_gen():
|
||||
feat = json.loads(first_line)
|
||||
feat["geometry"] = transformer(Geometry.from_dict(**feat["geometry"]))
|
||||
yield json.dumps(feat, cls=ObjectEncoder, **dump_kwds)
|
||||
|
||||
for line in stdin:
|
||||
feat = json.loads(line)
|
||||
feat["geometry"] = transformer(
|
||||
Geometry.from_dict(**feat["geometry"])
|
||||
)
|
||||
yield json.dumps(feat, cls=ObjectEncoder, **dump_kwds)
|
||||
|
||||
# If *not* parsing geojson
|
||||
else:
|
||||
# If input is RS-delimited JSON sequence.
|
||||
if first_line.startswith("\x1e"):
|
||||
|
||||
def feature_text_gen():
|
||||
buffer = first_line.strip("\x1e")
|
||||
for line in stdin:
|
||||
if line.startswith("\x1e"):
|
||||
if buffer:
|
||||
yield buffer
|
||||
buffer = line.strip("\x1e")
|
||||
else:
|
||||
buffer += line
|
||||
else:
|
||||
yield buffer
|
||||
|
||||
else:
|
||||
|
||||
def feature_text_gen():
|
||||
yield first_line
|
||||
yield from stdin
|
||||
|
||||
source = feature_text_gen()
|
||||
|
||||
if record_buffered:
|
||||
# Buffer GeoJSON data at the feature level for smaller
|
||||
# memory footprint.
|
||||
indented = bool(indent)
|
||||
rec_indent = "\n" + " " * (2 * (indent or 0))
|
||||
|
||||
collection = {"type": "FeatureCollection", "features": []}
|
||||
if with_ld_context:
|
||||
collection["@context"] = helpers.make_ld_context(add_ld_context_item)
|
||||
|
||||
head, tail = json.dumps(collection, cls=ObjectEncoder, **dump_kwds).split("[]")
|
||||
|
||||
sink.write(head)
|
||||
sink.write("[")
|
||||
|
||||
# Try the first record.
|
||||
try:
|
||||
i, first = 0, next(source)
|
||||
if with_ld_context:
|
||||
first = helpers.id_record(first)
|
||||
if indented:
|
||||
sink.write(rec_indent)
|
||||
sink.write(first.replace("\n", rec_indent))
|
||||
except StopIteration:
|
||||
pass
|
||||
except Exception as exc:
|
||||
# Ignoring errors is *not* the default.
|
||||
if ignore_errors:
|
||||
logger.error(
|
||||
"failed to serialize file record %d (%s), " "continuing", i, exc
|
||||
)
|
||||
else:
|
||||
# Log error and close up the GeoJSON, leaving it
|
||||
# more or less valid no matter what happens above.
|
||||
logger.critical(
|
||||
"failed to serialize file record %d (%s), " "quiting", i, exc
|
||||
)
|
||||
sink.write("]")
|
||||
sink.write(tail)
|
||||
if indented:
|
||||
sink.write("\n")
|
||||
raise
|
||||
|
||||
# Because trailing commas aren't valid in JSON arrays
|
||||
# we'll write the item separator before each of the
|
||||
# remaining features.
|
||||
for i, rec in enumerate(source, 1):
|
||||
try:
|
||||
if with_ld_context:
|
||||
rec = helpers.id_record(rec)
|
||||
if indented:
|
||||
sink.write(rec_indent)
|
||||
sink.write(item_sep)
|
||||
sink.write(rec.replace("\n", rec_indent))
|
||||
except Exception as exc:
|
||||
if ignore_errors:
|
||||
logger.error(
|
||||
"failed to serialize file record %d (%s), " "continuing",
|
||||
i,
|
||||
exc,
|
||||
)
|
||||
else:
|
||||
logger.critical(
|
||||
"failed to serialize file record %d (%s), " "quiting",
|
||||
i,
|
||||
exc,
|
||||
)
|
||||
sink.write("]")
|
||||
sink.write(tail)
|
||||
if indented:
|
||||
sink.write("\n")
|
||||
raise
|
||||
|
||||
# Close up the GeoJSON after writing all features.
|
||||
sink.write("]")
|
||||
sink.write(tail)
|
||||
if indented:
|
||||
sink.write("\n")
|
||||
|
||||
else:
|
||||
# Buffer GeoJSON data at the collection level. The default.
|
||||
collection = {"type": "FeatureCollection", "features": []}
|
||||
if with_ld_context:
|
||||
collection["@context"] = helpers.make_ld_context(add_ld_context_item)
|
||||
|
||||
head, tail = json.dumps(collection, cls=ObjectEncoder, **dump_kwds).split("[]")
|
||||
sink.write(head)
|
||||
sink.write("[")
|
||||
sink.write(",".join(source))
|
||||
sink.write("]")
|
||||
sink.write(tail)
|
||||
sink.write("\n")
|
||||
35
.venv/lib/python3.12/site-packages/fiona/fio/distrib.py
Normal file
35
.venv/lib/python3.12/site-packages/fiona/fio/distrib.py
Normal file
@@ -0,0 +1,35 @@
|
||||
"""$ fio distrib"""
|
||||
|
||||
import json
|
||||
|
||||
import click
|
||||
import cligj
|
||||
|
||||
from fiona.fio import helpers, with_context_env
|
||||
from fiona.model import ObjectEncoder
|
||||
|
||||
|
||||
@click.command()
|
||||
@cligj.use_rs_opt
|
||||
@click.pass_context
|
||||
@with_context_env
|
||||
def distrib(ctx, use_rs):
|
||||
"""Distribute features from a collection.
|
||||
|
||||
Print the features of GeoJSON objects read from stdin.
|
||||
|
||||
"""
|
||||
stdin = click.get_text_stream('stdin')
|
||||
source = helpers.obj_gen(stdin)
|
||||
|
||||
for i, obj in enumerate(source):
|
||||
obj_id = obj.get("id", "collection:" + str(i))
|
||||
features = obj.get("features") or [obj]
|
||||
for j, feat in enumerate(features):
|
||||
if obj.get("type") == "FeatureCollection":
|
||||
feat["parent"] = obj_id
|
||||
feat_id = feat.get("id", "feature:" + str(i))
|
||||
feat["id"] = feat_id
|
||||
if use_rs:
|
||||
click.echo("\x1e", nl=False)
|
||||
click.echo(json.dumps(feat, cls=ObjectEncoder))
|
||||
198
.venv/lib/python3.12/site-packages/fiona/fio/dump.py
Normal file
198
.venv/lib/python3.12/site-packages/fiona/fio/dump.py
Normal file
@@ -0,0 +1,198 @@
|
||||
"""fio-dump"""
|
||||
|
||||
from functools import partial
|
||||
import json
|
||||
import logging
|
||||
|
||||
import click
|
||||
import cligj
|
||||
|
||||
import fiona
|
||||
from fiona.fio import helpers, options, with_context_env
|
||||
from fiona.model import Feature, ObjectEncoder
|
||||
from fiona.transform import transform_geom
|
||||
|
||||
|
||||
@click.command(short_help="Dump a dataset to GeoJSON.")
|
||||
@click.argument('input', required=True)
|
||||
@click.option('--layer', metavar="INDEX|NAME", callback=options.cb_layer,
|
||||
help="Print information about a specific layer. The first "
|
||||
"layer is used by default. Layers use zero-based "
|
||||
"numbering when accessed by index.")
|
||||
@click.option('--encoding', help="Specify encoding of the input file.")
|
||||
@cligj.precision_opt
|
||||
@cligj.indent_opt
|
||||
@cligj.compact_opt
|
||||
@click.option('--record-buffered/--no-record-buffered', default=False,
|
||||
help="Economical buffering of writes at record, not collection "
|
||||
"(default), level.")
|
||||
@click.option('--ignore-errors/--no-ignore-errors', default=False,
|
||||
help="log errors but do not stop serialization.")
|
||||
@click.option('--with-ld-context/--without-ld-context', default=False,
|
||||
help="add a JSON-LD context to JSON output.")
|
||||
@click.option('--add-ld-context-item', multiple=True,
|
||||
help="map a term to a URI and add it to the output's JSON LD "
|
||||
"context.")
|
||||
@options.open_opt
|
||||
@click.pass_context
|
||||
@with_context_env
|
||||
def dump(
|
||||
ctx,
|
||||
input,
|
||||
encoding,
|
||||
precision,
|
||||
indent,
|
||||
compact,
|
||||
record_buffered,
|
||||
ignore_errors,
|
||||
with_ld_context,
|
||||
add_ld_context_item,
|
||||
layer,
|
||||
open_options,
|
||||
):
|
||||
|
||||
"""Dump a dataset either as a GeoJSON feature collection (the default)
|
||||
or a sequence of GeoJSON features."""
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
sink = click.get_text_stream('stdout')
|
||||
|
||||
dump_kwds = {'sort_keys': True}
|
||||
if indent:
|
||||
dump_kwds['indent'] = indent
|
||||
if compact:
|
||||
dump_kwds['separators'] = (',', ':')
|
||||
item_sep = compact and ',' or ', '
|
||||
|
||||
if encoding:
|
||||
open_options["encoding"] = encoding
|
||||
if layer:
|
||||
open_options["layer"] = layer
|
||||
|
||||
def transformer(crs, feat):
|
||||
tg = partial(
|
||||
transform_geom,
|
||||
crs,
|
||||
"EPSG:4326",
|
||||
antimeridian_cutting=True,
|
||||
precision=precision,
|
||||
)
|
||||
return Feature(
|
||||
id=feat.id, properties=feat.properties, geometry=tg(feat.geometry)
|
||||
)
|
||||
|
||||
with fiona.open(input, **open_options) as source:
|
||||
meta = source.meta
|
||||
meta["fields"] = dict(source.schema["properties"].items())
|
||||
|
||||
if record_buffered:
|
||||
# Buffer GeoJSON data at the feature level for smaller
|
||||
# memory footprint.
|
||||
indented = bool(indent)
|
||||
rec_indent = "\n" + " " * (2 * (indent or 0))
|
||||
|
||||
collection = {
|
||||
"type": "FeatureCollection",
|
||||
"fiona:schema": meta["schema"],
|
||||
"fiona:crs": meta["crs"],
|
||||
"features": [],
|
||||
}
|
||||
if with_ld_context:
|
||||
collection["@context"] = helpers.make_ld_context(add_ld_context_item)
|
||||
|
||||
head, tail = json.dumps(collection, **dump_kwds).split("[]")
|
||||
|
||||
sink.write(head)
|
||||
sink.write("[")
|
||||
|
||||
itr = iter(source)
|
||||
|
||||
# Try the first record.
|
||||
try:
|
||||
i, first = 0, next(itr)
|
||||
first = transformer(first)
|
||||
if with_ld_context:
|
||||
first = helpers.id_record(first)
|
||||
if indented:
|
||||
sink.write(rec_indent)
|
||||
sink.write(
|
||||
json.dumps(first, cls=ObjectEncoder, **dump_kwds).replace(
|
||||
"\n", rec_indent
|
||||
)
|
||||
)
|
||||
except StopIteration:
|
||||
pass
|
||||
except Exception as exc:
|
||||
# Ignoring errors is *not* the default.
|
||||
if ignore_errors:
|
||||
logger.error(
|
||||
"failed to serialize file record %d (%s), " "continuing", i, exc
|
||||
)
|
||||
else:
|
||||
# Log error and close up the GeoJSON, leaving it
|
||||
# more or less valid no matter what happens above.
|
||||
logger.critical(
|
||||
"failed to serialize file record %d (%s), " "quiting", i, exc
|
||||
)
|
||||
sink.write("]")
|
||||
sink.write(tail)
|
||||
if indented:
|
||||
sink.write("\n")
|
||||
raise
|
||||
|
||||
# Because trailing commas aren't valid in JSON arrays
|
||||
# we'll write the item separator before each of the
|
||||
# remaining features.
|
||||
for i, rec in enumerate(itr, 1):
|
||||
rec = transformer(rec)
|
||||
try:
|
||||
if with_ld_context:
|
||||
rec = helpers.id_record(rec)
|
||||
if indented:
|
||||
sink.write(rec_indent)
|
||||
sink.write(item_sep)
|
||||
sink.write(
|
||||
json.dumps(rec, cls=ObjectEncoder, **dump_kwds).replace(
|
||||
"\n", rec_indent
|
||||
)
|
||||
)
|
||||
except Exception as exc:
|
||||
if ignore_errors:
|
||||
logger.error(
|
||||
"failed to serialize file record %d (%s), "
|
||||
"continuing",
|
||||
i, exc)
|
||||
else:
|
||||
logger.critical(
|
||||
"failed to serialize file record %d (%s), "
|
||||
"quiting",
|
||||
i, exc)
|
||||
sink.write("]")
|
||||
sink.write(tail)
|
||||
if indented:
|
||||
sink.write("\n")
|
||||
raise
|
||||
|
||||
# Close up the GeoJSON after writing all features.
|
||||
sink.write("]")
|
||||
sink.write(tail)
|
||||
if indented:
|
||||
sink.write("\n")
|
||||
|
||||
else:
|
||||
# Buffer GeoJSON data at the collection level. The default.
|
||||
collection = {
|
||||
"type": "FeatureCollection",
|
||||
"fiona:schema": meta["schema"],
|
||||
"fiona:crs": meta["crs"].to_string(),
|
||||
}
|
||||
if with_ld_context:
|
||||
collection["@context"] = helpers.make_ld_context(add_ld_context_item)
|
||||
collection["features"] = [
|
||||
helpers.id_record(transformer(rec)) for rec in source
|
||||
]
|
||||
else:
|
||||
collection["features"] = [
|
||||
transformer(source.crs, rec) for rec in source
|
||||
]
|
||||
json.dump(collection, sink, cls=ObjectEncoder, **dump_kwds)
|
||||
38
.venv/lib/python3.12/site-packages/fiona/fio/env.py
Normal file
38
.venv/lib/python3.12/site-packages/fiona/fio/env.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""$ fio env"""
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
import click
|
||||
|
||||
import fiona
|
||||
from fiona._env import GDALDataFinder, PROJDataFinder
|
||||
|
||||
|
||||
@click.command(short_help="Print information about the fio environment.")
|
||||
@click.option('--formats', 'key', flag_value='formats', default=True,
|
||||
help="Enumerate the available formats.")
|
||||
@click.option('--credentials', 'key', flag_value='credentials', default=False,
|
||||
help="Print credentials.")
|
||||
@click.option('--gdal-data', 'key', flag_value='gdal_data', default=False,
|
||||
help="Print GDAL data path.")
|
||||
@click.option('--proj-data', 'key', flag_value='proj_data', default=False,
|
||||
help="Print PROJ data path.")
|
||||
@click.pass_context
|
||||
def env(ctx, key):
|
||||
"""Print information about the Fiona environment: available
|
||||
formats, etc.
|
||||
"""
|
||||
stdout = click.get_text_stream('stdout')
|
||||
with ctx.obj['env'] as env:
|
||||
if key == 'formats':
|
||||
for k, v in sorted(fiona.supported_drivers.items()):
|
||||
modes = ', '.join("'" + m + "'" for m in v)
|
||||
stdout.write(f"{k} (modes {modes})\n")
|
||||
stdout.write('\n')
|
||||
elif key == 'credentials':
|
||||
click.echo(json.dumps(env.session.credentials))
|
||||
elif key == 'gdal_data':
|
||||
click.echo(os.environ.get('GDAL_DATA') or GDALDataFinder().search())
|
||||
elif key == 'proj_data':
|
||||
click.echo(os.environ.get('PROJ_DATA', os.environ.get('PROJ_LIB')) or PROJDataFinder().search())
|
||||
54
.venv/lib/python3.12/site-packages/fiona/fio/filter.py
Normal file
54
.venv/lib/python3.12/site-packages/fiona/fio/filter.py
Normal file
@@ -0,0 +1,54 @@
|
||||
"""$ fio filter"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
|
||||
import click
|
||||
from cligj import use_rs_opt
|
||||
|
||||
from fiona.fio.helpers import obj_gen, eval_feature_expression
|
||||
from fiona.fio import with_context_env
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@click.command()
|
||||
@click.argument('filter_expression')
|
||||
@use_rs_opt
|
||||
@click.pass_context
|
||||
@with_context_env
|
||||
def filter(ctx, filter_expression, use_rs):
|
||||
"""
|
||||
Filter GeoJSON features by python expression.
|
||||
|
||||
Features are read from stdin.
|
||||
|
||||
The expression is evaluated in a restricted namespace containing:
|
||||
- sum, pow, min, max and the imported math module
|
||||
- shape (optional, imported from shapely.geometry if available)
|
||||
- bool, int, str, len, float type conversions
|
||||
- f (the feature to be evaluated,
|
||||
allows item access via javascript-style dot notation using munch)
|
||||
|
||||
The expression will be evaluated for each feature and, if true,
|
||||
the feature will be included in the output. For example:
|
||||
|
||||
\b
|
||||
$ fio cat data.shp \\
|
||||
| fio filter "f.properties.area > 1000.0" \\
|
||||
| fio collect > large_polygons.geojson
|
||||
|
||||
"""
|
||||
stdin = click.get_text_stream('stdin')
|
||||
source = obj_gen(stdin)
|
||||
|
||||
for i, obj in enumerate(source):
|
||||
features = obj.get("features") or [obj]
|
||||
for j, feat in enumerate(features):
|
||||
if not eval_feature_expression(feat, filter_expression):
|
||||
continue
|
||||
|
||||
if use_rs:
|
||||
click.echo("\x1e", nl=False)
|
||||
click.echo(json.dumps(feat))
|
||||
134
.venv/lib/python3.12/site-packages/fiona/fio/helpers.py
Normal file
134
.venv/lib/python3.12/site-packages/fiona/fio/helpers.py
Normal file
@@ -0,0 +1,134 @@
|
||||
"""Helper objects needed by multiple CLI commands.
|
||||
|
||||
"""
|
||||
|
||||
from functools import partial
|
||||
import json
|
||||
import math
|
||||
import warnings
|
||||
|
||||
from fiona.model import Geometry, to_dict
|
||||
from fiona._vendor.munch import munchify
|
||||
|
||||
|
||||
warnings.simplefilter("default")
|
||||
|
||||
|
||||
def obj_gen(lines, object_hook=None):
    """Return a generator of JSON objects loaded from ``lines``."""
    head = next(lines)

    if head.startswith("\x1e"):
        # RFC 7464 RS-delimited JSON sequence: accumulate text between
        # RS markers and decode each accumulated chunk.
        def gen():
            chunk = head.strip("\x1e")
            for line in lines:
                if not line.startswith("\x1e"):
                    chunk += line
                    continue
                if chunk:
                    yield json.loads(chunk, object_hook=object_hook)
                chunk = line.strip("\x1e")
            # Decode whatever remains once the input is exhausted.
            yield json.loads(chunk, object_hook=object_hook)

    else:
        # Plain mode: one JSON text per line.
        def gen():
            yield json.loads(head, object_hook=object_hook)
            for line in lines:
                yield json.loads(line, object_hook=object_hook)

    return gen()
|
||||
|
||||
|
||||
def nullable(val, cast):
    """Apply ``cast`` to ``val``, passing ``None`` through untouched."""
    return None if val is None else cast(val)
|
||||
|
||||
|
||||
def eval_feature_expression(feature, expression):
    """Evaluate a Python ``expression`` against ``feature``.

    The feature is exposed as ``f`` (munchified, so dot access works)
    in a restricted namespace alongside a handful of safe callables.
    Returns whatever the expression evaluates to.
    """
    safe_dict = {"f": munchify(to_dict(feature))}
    safe_dict.update(
        {
            "sum": sum,
            "pow": pow,
            "min": min,
            "max": max,
            "math": math,
            "bool": bool,
            # int/str/float/len are wrapped so they pass None through
            # instead of raising on missing property values.
            "int": partial(nullable, int),
            "str": partial(nullable, str),
            "float": partial(nullable, float),
            "len": partial(nullable, len),
        }
    )
    # shapely is an optional dependency; expose shape() only if present.
    try:
        from shapely.geometry import shape

        safe_dict["shape"] = shape
    except ImportError:
        pass
    # SECURITY: eval of a user-supplied expression. Stripping
    # __builtins__ limits but does not fully sandbox it — do not feed
    # untrusted expressions to this function.
    return eval(expression, {"__builtins__": None}, safe_dict)
|
||||
|
||||
|
||||
def make_ld_context(context_items):
    """Returns a JSON-LD Context object.

    ``context_items`` is an optional iterable of "term=uri" strings,
    each added to the returned object.

    See https://json-ld.org/spec/latest/json-ld/."""
    ctx = {
        "@context": {
            "geojson": "http://ld.geojson.org/vocab#",
            "Feature": "geojson:Feature",
            "FeatureCollection": "geojson:FeatureCollection",
            "GeometryCollection": "geojson:GeometryCollection",
            "LineString": "geojson:LineString",
            "MultiLineString": "geojson:MultiLineString",
            "MultiPoint": "geojson:MultiPoint",
            "MultiPolygon": "geojson:MultiPolygon",
            "Point": "geojson:Point",
            "Polygon": "geojson:Polygon",
            "bbox": {"@container": "@list", "@id": "geojson:bbox"},
            "coordinates": "geojson:coordinates",
            "datetime": "http://www.w3.org/2006/time#inXSDDateTime",
            "description": "http://purl.org/dc/terms/description",
            "features": {"@container": "@set", "@id": "geojson:features"},
            "geometry": "geojson:geometry",
            "id": "@id",
            "properties": "geojson:properties",
            "start": "http://www.w3.org/2006/time#hasBeginning",
            "stop": "http://www.w3.org/2006/time#hasEnding",
            "title": "http://purl.org/dc/terms/title",
            "type": "@type",
            "when": "geojson:when",
        }
    }
    for item in context_items or []:
        # Fix: split on the first "=" only, so URIs containing "=" (for
        # example query strings) no longer raise ValueError.
        t, uri = item.split("=", 1)
        ctx[t.strip()] = uri.strip()
    return ctx
|
||||
|
||||
|
||||
def id_record(rec):
    """Converts a record's id to a blank node id and returns the record."""
    rec["id"] = f"_:f{rec['id']}"
    return rec
|
||||
|
||||
|
||||
def recursive_round(obj, precision):
    """Recursively round coordinates."""
    # A negative precision means "do not round at all".
    if precision < 0:
        return obj
    # Geometry collections: rebuild with each part rounded.
    if getattr(obj, "geometries", None):
        parts = [recursive_round(g, precision) for g in obj.geometries]
        return Geometry(geometries=parts)
    # Simple geometries: rebuild with rounded coordinates.
    if getattr(obj, "coordinates", None):
        coords = [recursive_round(c, precision) for c in obj.coordinates]
        return Geometry(coordinates=coords)
    if isinstance(obj, (int, float)):
        return round(obj, precision)
    # Otherwise assume a nested sequence of coordinate values.
    return [recursive_round(item, precision) for item in obj]
|
||||
78
.venv/lib/python3.12/site-packages/fiona/fio/info.py
Normal file
78
.venv/lib/python3.12/site-packages/fiona/fio/info.py
Normal file
@@ -0,0 +1,78 @@
|
||||
"""$ fio info"""
|
||||
|
||||
|
||||
import logging
|
||||
import json
|
||||
|
||||
import click
|
||||
from cligj import indent_opt
|
||||
|
||||
import fiona
|
||||
import fiona.crs
|
||||
from fiona.errors import DriverError
|
||||
from fiona.fio import options, with_context_env
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@click.command()
# One or more files.
@click.argument('input', required=True)
@click.option('--layer', metavar="INDEX|NAME", callback=options.cb_layer,
              help="Print information about a specific layer. The first "
                   "layer is used by default. Layers use zero-based "
                   "numbering when accessed by index.")
@indent_opt
# Options to pick out a single metadata item and print it as
# a string.
@click.option('--count', 'meta_member', flag_value='count',
              help="Print the count of features.")
@click.option('-f', '--format', '--driver', 'meta_member', flag_value='driver',
              help="Print the format driver.")
@click.option('--crs', 'meta_member', flag_value='crs',
              help="Print the CRS as a PROJ.4 string.")
@click.option('--bounds', 'meta_member', flag_value='bounds',
              help="Print the boundary coordinates "
                   "(left, bottom, right, top).")
@click.option('--name', 'meta_member', flag_value='name',
              help="Print the datasource's name.")
@options.open_opt
@click.pass_context
@with_context_env
def info(ctx, input, indent, meta_member, layer, open_options):
    """
    Print information about a dataset.

    When working with a multi-layer dataset the first layer is used by default.
    Use the '--layer' option to select a different layer.

    """
    with fiona.open(input, layer=layer, **open_options) as src:
        # Start from the collection's metadata and enrich it.
        meta = src.meta
        meta.update(name=src.name)

        # Some drivers cannot compute an extent; record None instead.
        try:
            meta.update(bounds=src.bounds)
        except DriverError:
            meta.update(bounds=None)
            logger.debug(
                "Setting 'bounds' to None - driver was not able to calculate bounds"
            )

        # Some layers cannot report a feature count; record None instead.
        try:
            meta.update(count=len(src))
        except TypeError:
            meta.update(count=None)
            logger.debug(
                "Setting 'count' to None/null - layer does not support counting"
            )

        meta["crs"] = src.crs.to_string()

        # Either dump everything as JSON or print the single requested item.
        if not meta_member:
            click.echo(json.dumps(meta, indent=indent))
        elif isinstance(meta[meta_member], (list, tuple)):
            click.echo(" ".join(map(str, meta[meta_member])))
        else:
            click.echo(meta[meta_member])
|
||||
43
.venv/lib/python3.12/site-packages/fiona/fio/insp.py
Normal file
43
.venv/lib/python3.12/site-packages/fiona/fio/insp.py
Normal file
@@ -0,0 +1,43 @@
|
||||
"""$ fio insp"""
|
||||
|
||||
|
||||
import code
|
||||
import sys
|
||||
|
||||
import click
|
||||
|
||||
import fiona
|
||||
from fiona.fio import options, with_context_env
|
||||
|
||||
|
||||
@click.command(short_help="Open a dataset and start an interpreter.")
@click.argument("src_path", required=True)
@click.option(
    "--ipython", "interpreter", flag_value="ipython", help="Use IPython as interpreter."
)
@options.open_opt
@click.pass_context
@with_context_env
def insp(ctx, src_path, interpreter, open_options):
    """Open a collection within an interactive interpreter."""
    banner = (
        "Fiona %s Interactive Inspector (Python %s)\n"
        'Type "src.schema", "next(src)", or "help(src)" '
        "for more information."
        % (fiona.__version__, ".".join(map(str, sys.version_info[:3])))
    )

    with fiona.open(src_path, **open_options) as src:
        # Capture the local namespace (including `src`) for the shell.
        scope = locals()
        if interpreter == "ipython":
            import IPython

            IPython.InteractiveShell.banner1 = banner
            IPython.start_ipython(argv=[], user_ns=scope)
        elif interpreter:
            raise click.ClickException(
                "Interpreter {} is unsupported or missing "
                "dependencies".format(interpreter)
            )
        else:
            # Default: the stdlib REPL.
            code.interact(banner, local=scope)
|
||||
112
.venv/lib/python3.12/site-packages/fiona/fio/load.py
Normal file
112
.venv/lib/python3.12/site-packages/fiona/fio/load.py
Normal file
@@ -0,0 +1,112 @@
|
||||
"""$ fio load"""
|
||||
|
||||
from functools import partial
|
||||
|
||||
import click
|
||||
import cligj
|
||||
|
||||
import fiona
|
||||
from fiona.fio import options, with_context_env
|
||||
from fiona.model import Feature, Geometry
|
||||
from fiona.schema import FIELD_TYPES_MAP_REV
|
||||
from fiona.transform import transform_geom
|
||||
|
||||
|
||||
@click.command(short_help="Load GeoJSON to a dataset in another format.")
@click.argument("output", required=True)
@click.option("-f", "--format", "--driver", "driver", help="Output format driver name.")
@options.src_crs_opt
@click.option(
    "--dst-crs",
    "--dst_crs",
    help="Destination CRS. Defaults to --src-crs when not given.",
)
@cligj.features_in_arg
@click.option(
    "--layer",
    metavar="INDEX|NAME",
    callback=options.cb_layer,
    help="Load features into specified layer. Layers use "
    "zero-based numbering when accessed by index.",
)
@options.creation_opt
@options.open_opt
@click.option("--append", is_flag=True, help="Open destination layer in append mode.")
@click.pass_context
@with_context_env
def load(
    ctx,
    output,
    driver,
    src_crs,
    dst_crs,
    features,
    layer,
    creation_options,
    open_options,
    append,
):
    """Load features from JSON to a file in another format.

    The input is a GeoJSON feature collection or optionally a sequence of
    GeoJSON feature objects.

    """
    dst_crs = dst_crs or src_crs

    # Reproject only when both CRSs are known and differ; otherwise the
    # "transformer" merely normalizes the GeoJSON dict into a Geometry.
    if src_crs and dst_crs and src_crs != dst_crs:
        transformer = partial(
            transform_geom, src_crs, dst_crs, antimeridian_cutting=True
        )
    else:

        def transformer(x):
            # Identity transform: wrap the geometry dict in a Geometry.
            return Geometry.from_dict(**x)

    def feature_gen():
        """Convert stream of JSON to features.

        Yields
        ------
        Feature

        """
        # A TypeError here indicates malformed input (e.g. a feature or
        # geometry that is not a mapping) and is reported as a CLI error.
        try:
            for feat in features:
                feat["geometry"] = transformer(Geometry.from_dict(**feat["geometry"]))
                yield Feature.from_dict(**feat)
        except TypeError:
            raise click.ClickException("Invalid input.")

    source = feature_gen()

    # Use schema of first feature as a template.
    # TODO: schema specified on command line?
    try:
        first = next(source)
    except TypeError:
        raise click.ClickException("Invalid input.")

    # print(first, first.geometry)
    # Infer property types from the first feature's Python values,
    # falling back to "str" for unmapped types.
    schema = {"geometry": first.geometry.type}
    schema["properties"] = {
        k: FIELD_TYPES_MAP_REV.get(type(v)) or "str"
        for k, v in first.properties.items()
    }

    # Append mode reuses the destination's existing schema/CRS; write
    # mode creates the layer from the inferred schema above.
    if append:
        opener = fiona.open(output, "a", layer=layer, **open_options)
    else:
        opener = fiona.open(
            output,
            "w",
            driver=driver,
            crs=dst_crs,
            schema=schema,
            layer=layer,
            **creation_options
        )

    with opener as dst:
        # `first` was consumed from the generator for schema inference,
        # so write it explicitly before streaming the rest.
        dst.write(first)
        dst.writerecords(source)
|
||||
24
.venv/lib/python3.12/site-packages/fiona/fio/ls.py
Normal file
24
.venv/lib/python3.12/site-packages/fiona/fio/ls.py
Normal file
@@ -0,0 +1,24 @@
|
||||
"""$ fiona ls"""
|
||||
|
||||
|
||||
import json
|
||||
|
||||
import click
|
||||
from cligj import indent_opt
|
||||
|
||||
import fiona
|
||||
from fiona.fio import options, with_context_env
|
||||
|
||||
|
||||
@click.command()
@click.argument('input', required=True)
@indent_opt
@options.open_opt
@click.pass_context
@with_context_env
def ls(ctx, input, indent, open_options):
    """
    List layers in a datasource.
    """
    layer_names = fiona.listlayers(input, **open_options)
    click.echo(json.dumps(layer_names, indent=indent))
|
||||
73
.venv/lib/python3.12/site-packages/fiona/fio/main.py
Normal file
73
.venv/lib/python3.12/site-packages/fiona/fio/main.py
Normal file
@@ -0,0 +1,73 @@
|
||||
"""
|
||||
Main click group for the CLI. Needs to be isolated for entry-point loading.
|
||||
"""
|
||||
|
||||
|
||||
import itertools
|
||||
import logging
|
||||
import sys
|
||||
|
||||
import click
|
||||
from click_plugins import with_plugins
|
||||
from cligj import verbose_opt, quiet_opt
|
||||
|
||||
if sys.version_info < (3, 10):
|
||||
from importlib_metadata import entry_points
|
||||
else:
|
||||
from importlib.metadata import entry_points
|
||||
|
||||
import fiona
|
||||
from fiona import __version__ as fio_version
|
||||
from fiona.session import AWSSession, DummySession
|
||||
|
||||
|
||||
def configure_logging(verbosity):
    """Configure the root logger from a -v/-q verbosity count."""
    # Each -v lowers the threshold by 10; clamp at DEBUG (10).
    level = max(10, 30 - 10 * verbosity)
    logging.basicConfig(stream=sys.stderr, level=level)
|
||||
|
||||
|
||||
@with_plugins(
    itertools.chain(
        entry_points(group="fiona.fio_commands"),
        entry_points(group="fiona.fio_plugins"),
    )
)
@click.group()
@verbose_opt
@quiet_opt
@click.option(
    "--aws-profile",
    help="Select a profile from the AWS credentials file")
@click.option(
    "--aws-no-sign-requests",
    is_flag=True,
    help="Make requests anonymously")
@click.option(
    "--aws-requester-pays",
    is_flag=True,
    help="Requester pays data transfer costs")
@click.version_option(fio_version)
@click.version_option(fiona.__gdal_version__, '--gdal-version',
                      prog_name='GDAL')
@click.version_option(sys.version, '--python-version', prog_name='Python')
@click.pass_context
def main_group(
        ctx, verbose, quiet, aws_profile, aws_no_sign_requests,
        aws_requester_pays):
    """Fiona command line interface.
    """
    # Net verbosity: each -v adds one, each -q subtracts one.
    verbosity = verbose - quiet
    configure_logging(verbosity)
    # Shared state for subcommands; with_context_env pops "env" later.
    ctx.obj = {}
    ctx.obj["verbosity"] = verbosity
    ctx.obj["aws_profile"] = aws_profile
    # Enable GDAL's CPL debug output only at high verbosity (-vvv).
    envopts = {"CPL_DEBUG": (verbosity > 2)}
    # NOTE(review): --aws-requester-pays has no effect unless
    # --aws-profile or --aws-no-sign-requests is also given, because a
    # DummySession is used otherwise — confirm this is intended.
    if aws_profile or aws_no_sign_requests:
        session = AWSSession(
            profile_name=aws_profile,
            aws_unsigned=aws_no_sign_requests,
            requester_pays=aws_requester_pays,
        )
    else:
        session = DummySession()
    ctx.obj["env"] = fiona.Env(session=session, **envopts)
|
||||
96
.venv/lib/python3.12/site-packages/fiona/fio/options.py
Normal file
96
.venv/lib/python3.12/site-packages/fiona/fio/options.py
Normal file
@@ -0,0 +1,96 @@
|
||||
"""Common commandline options for `fio`"""
|
||||
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
import click
|
||||
|
||||
|
||||
# Reusable CRS options shared by fio commands; both the dashed and
# underscored spellings are accepted on the command line.
src_crs_opt = click.option('--src-crs', '--src_crs', help="Source CRS.")
dst_crs_opt = click.option('--dst-crs', '--dst_crs', help="Destination CRS.")
|
||||
|
||||
|
||||
def cb_layer(ctx, param, value):
    """Let --layer be a name or index."""
    # Purely numeric values are treated as zero-based layer indexes.
    if value is not None and value.isdigit():
        return int(value)
    return value
|
||||
|
||||
|
||||
def cb_multilayer(ctx, param, value):
    """
    Transform layer options from strings ("1:a,1:b", "2:a,2:c,2:z") to
    {
        '1': ['a', 'b'],
        '2': ['a', 'c', 'z']
    }
    """
    grouped = defaultdict(list)
    entries = (entry for raw in value for entry in raw.split(','))
    for entry in entries:
        ds, name = entry.split(':')
        grouped[ds].append(name)
    return grouped
|
||||
|
||||
|
||||
def cb_key_val(ctx, param, value):
    """
    click callback to validate `--opt KEY1=VAL1 --opt KEY2=VAL2` and collect
    in a dictionary like the one below, which is what the CLI function receives.
    If no value or `None` is received then an empty dictionary is returned.

        {
            'KEY1': 'VAL1',
            'KEY2': 'VAL2'
        }

    Note: `==VAL` breaks this as `str.split('=', 1)` is used.

    Parameters
    ----------
    ctx, param
        Standard click callback arguments (unused).
    value : tuple of str or None
        Raw "KEY=VAL" pairs from the command line.

    Returns
    -------
    dict
        Lowercased keys mapped to lowercased values; the values "none",
        "null", "nil", and "nada" (any case) become None.

    Raises
    ------
    click.BadParameter
        If a pair lacks an "=" separator.
    """
    if not value:
        return {}
    out = {}
    for pair in value:
        if "=" not in pair:
            raise click.BadParameter(
                f"Invalid syntax for KEY=VAL arg: {pair}"
            )
        k, v = pair.split("=", 1)
        k = k.lower()
        v = v.lower()
        # v is already lowercase here, so no second .lower() is needed
        # for the null-sentinel comparison.
        out[k] = None if v in ("none", "null", "nil", "nada") else v
    return out
|
||||
|
||||
|
||||
def validate_multilayer_file_index(files, layerdict):
    """
    Ensure file indexes provided in the --layer option are valid
    """
    # File indexes are 1-based strings: "1" .. str(len(files)).
    valid_keys = {str(i) for i in range(1, len(files) + 1)}
    for key, names in layerdict.items():
        if key not in valid_keys:
            layer = key + ":" + names[0]
            raise click.BadParameter(f"Layer {layer} does not exist")
|
||||
|
||||
|
||||
# Reusable option for driver-specific dataset creation options
# ("NAME=VALUE" pairs collected into a dict by cb_key_val).
creation_opt = click.option(
    "--co",
    "--profile",
    "creation_options",
    metavar="NAME=VALUE",
    multiple=True,
    callback=cb_key_val,
    help="Driver specific creation options. See the documentation for the selected output driver for more information.",
)


# Reusable option for driver-specific dataset open options
# ("NAME=VALUE" pairs collected into a dict by cb_key_val).
open_opt = click.option(
    "--oo",
    "open_options",
    metavar="NAME=VALUE",
    multiple=True,
    callback=cb_key_val,
    help="Driver specific open options. See the documentation for the selected output driver for more information.",
)
|
||||
30
.venv/lib/python3.12/site-packages/fiona/fio/rm.py
Normal file
30
.venv/lib/python3.12/site-packages/fiona/fio/rm.py
Normal file
@@ -0,0 +1,30 @@
|
||||
import click
|
||||
import logging
|
||||
|
||||
import fiona
|
||||
from fiona.fio import with_context_env
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@click.command(help="Remove a datasource or an individual layer.")
@click.argument("input", required=True)
@click.option("--layer", type=str, default=None, required=False, help="Name of layer to remove.")
@click.option("--yes", is_flag=True)
@click.pass_context
@with_context_env
def rm(ctx, input, layer, yes):
    """Delete a whole datasource, or one layer of it when --layer is given."""
    kind = "datasource" if layer is None else "layer"

    # Ask for confirmation unless --yes was passed; abort on "no".
    if not yes:
        click.confirm(f"The {kind} will be removed. Are you sure?", abort=True)

    try:
        fiona.remove(input, layer=layer)
    except Exception:
        # Log the full traceback, then exit with a non-zero status.
        logger.exception(f"Failed to remove {kind}.")
        raise click.Abort()
|
||||
Reference in New Issue
Block a user