如何解决Fiona 文件丢失-Python
我在 python 中有其他人的以下代码,我在下面附上的这段代码产生了以下错误消息
read_in_kml('1.kml')
  File "/Users/becker/Desktop/elevdata/read_in_kml.py", line 30, in read_in_kml
    gpd_df = gpd_df.append(gpd.read_file(path, driver='KML'), ignore_index=True)
  File "/Users/becker/opt/anaconda3/lib/python3.8/site-packages/geopandas/io/file.py", line 160, in _read_file
    with reader(path_or_bytes, **kwargs) as features:
  File "/Users/becker/opt/anaconda3/lib/python3.8/site-packages/fiona/env.py", line 400, in wrapper
    return f(*args, **kwargs)
  File "/Users/becker/opt/anaconda3/lib/python3.8/site-packages/fiona/__init__.py", line 256, in open
    c = Collection(path, mode, driver=driver, encoding=encoding, ...)
  File "/Users/becker/opt/anaconda3/lib/python3.8/site-packages/fiona/collection.py", line 162, in __init__
    self.session.start(self, **kwargs)
  File "fiona/ogrext.pyx", line 540, in fiona.ogrext.Session.start
  File "fiona/_shim.pyx", line 81, in fiona._shim.gdal_open_vector
DriverError: 1: No such file or directory
所以这个fiona文件好像有问题。我不太确定这意味着什么,因为我对 python 很陌生。是否缺少我必须安装的东西或代码有问题。我必须安装一个包吗? 这是代码:
import geopandas as gpd
import shapely
import json
import pandas as pd
from json.decoder import JSONDecodeError
import logging
# Module-level logger; DEBUG level so all diagnostics from this module surface.
logger = logging.getLogger(name=__name__)
logger.setLevel(level=logging.DEBUG)
def read_in_kml(kml_paths: list) -> gpd.GeoDataFrame:
    """
    Read KML files produced by the AuBeSa project's labelling scheme and
    return a geopandas GeoDataFrame with the polygons and their attributes.

    NOTE: to be adapted when the labelling scheme changes.

    :param kml_paths: list of paths to KML files. A single path string is
        also accepted and treated as a one-element list (passing a bare
        string used to iterate it character by character, which surfaced
        as fiona's ``DriverError: No such file or directory``).
    :return: geopandas GeoDataFrame containing the polygons and their attributes
    :raises ValueError: if no paths are given
    """
    # Guard against the common caller mistake of passing one path as a string.
    if isinstance(kml_paths, str):
        kml_paths = [kml_paths]
    if not kml_paths:
        raise ValueError("kml_paths must contain at least one path")

    # Enable fiona's KML driver (not registered by default in geopandas).
    gpd.io.file.fiona.drvsupport.supported_drivers['KML'] = 'rw'

    # BUGFIX: the original line was truncated/unbalanced. Use pd.concat
    # instead of DataFrame.append (deprecated, removed in pandas 2.0);
    # the result is identical for these frames.
    gpd_df = gpd.GeoDataFrame(
        pd.concat(
            (gpd.read_file(path, driver='KML') for path in kml_paths),
            ignore_index=True,
        )
    )
    gpd_df = gpd_df.to_crs("epsg:4326")

    # Get rid of the z coordinate: keep only (x, y) of every vertex.
    gpd_df = gpd_df.set_geometry(
        gpd_df.geometry.map(
            lambda polygon: shapely.ops.transform(lambda x, y, *_: (x, y), polygon)
        )
    )

    # Strip HTML markup from the description before parsing it.
    # NOTE(review): the last two entries render as plain spaces in this copy —
    # one was probably a non-breaking space (\xa0) originally; confirm.
    for remove in ["<div>", "</div>", "<br>", " ", " "]:
        gpd_df["Description"] = gpd_df["Description"].str.replace(remove, "")
    # Normalise typographic double quotes (U+201D, U+201C) to ASCII '"'
    # so JSON-style descriptions become parseable.
    for exchange in [8221, 8220]:
        gpd_df["Description"] = gpd_df["Description"].str.replace(chr(exchange), chr(34))

    # Scheme 1: plain comma-separated "type,visibility,date" descriptions.
    scheme_1_gdf = gpd_df[~gpd_df["Description"].str.contains("{")]
    if len(scheme_1_gdf):
        scheme_1_values = ["type", "visibility", "date"]
        df = scheme_1_gdf.assign(
            **dict(zip(scheme_1_values, zip(*scheme_1_gdf["Description"].str.split(","))))
        )
        gpd_df.loc[~gpd_df["Description"].str.contains("{"), scheme_1_values] = df

    # Scheme 2: JSON-encoded descriptions.
    scheme_2_gdf = gpd_df.loc[gpd_df["Description"].str.contains("{")]
    if len(scheme_2_gdf):
        try:
            # slow, but keeps arbitrary keys
            df = scheme_2_gdf["Description"].apply(lambda x: pd.Series(json.loads(x)))
            gpd_df.loc[gpd_df["Description"].str.contains("{"), df.columns] = df
            ## faster method (fixed key set):
            # scheme_2_values = ["type", "date", "granularity"]
            # df = scheme_2_gdf.assign(**dict(zip(scheme_2_values, zip(*scheme_2_gdf["Description"].apply(lambda x: x.values())))))
            # gpd_df.loc[gpd_df["Description"].str.contains("{"), df.columns] = df
        except JSONDecodeError:
            # Re-parse row by row so every broken shape gets reported.
            for _, row in scheme_2_gdf.iterrows():
                try:
                    json.loads(row["Description"])
                except JSONDecodeError:
                    logger.error("Shape {} has an invalid description: {}".format(row["Name"], row["Description"]))

    # Cast visibility to int when it arrived as a string ("object") column.
    if "visibility" in gpd_df.dtypes and gpd_df.dtypes.visibility in ["object"]:
        try:
            gpd_df["visibility"] = gpd_df["visibility"].astype(int)
        except (ValueError, TypeError):  # was a bare except; keep it narrow
            logger.error("Visibility can not be correctly assigned as integer")

    # Check that dates are valid.
    if "date" in gpd_df.dtypes:
        pd_dates = pd.to_datetime(gpd_df["date"])
        if pd_dates.isnull().any():
            logger.error("Dates {} incorrectly assigned".format(gpd_df[pd_dates.isnull()]["Name"].values))

    # The raw Description column is no longer needed.
    gpd_df = gpd_df.drop("Description", axis=1)

    # Add missing fields for img_polygon_associator.
    gpd_df['have_img?'] = False
    gpd_df['have_img_downloaded?'] = False
    gpd_df['download_exception'] = str(None)

    # Rename Name to polygon_name, force it to str and make it the index.
    gpd_df.rename(columns={'Name': 'polygon_name'}, inplace=True)
    gpd_df['polygon_name'] = gpd_df['polygon_name'].astype('str')
    gpd_df.set_index('polygon_name', inplace=True)
    gpd_df.index = gpd_df.index.map(str)

    # Expand the "type" dict column into one probability column per class.
    classes = list(set.union(*gpd_df["type"].apply(lambda x: set(x.keys()))))
    gpd_df = pd.concat([gpd_df, gpd_df["type"].apply(lambda x: pd.Series(x))], axis=1)
    gpd_df[classes] = gpd_df[classes].fillna(0.0)
    # Drop the original type dict column.
    gpd_df = gpd_df.drop("type", axis=1)
    # Obey the naming convention for segmentation class columns.
    gpd_df.rename(columns={cl: "prob_seg_class_{}".format(cl) for cl in classes}, inplace=True)
    return gpd_df
有人知道 fiona 的问题是什么吗?我是否必须安装任何东西或代码有问题?
版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 dio@foxmail.com 举报,一经查实,本站将立刻删除。