From a74117d10a3153366cbf5661ef91c4ebbd586983 Mon Sep 17 00:00:00 2001 From: sbastiangarzon Date: Mon, 4 Jan 2021 17:08:20 +0100 Subject: [PATCH 1/6] fixes #109, test including CRS and PEP8 style --- geoextent/__init__.py | 2 +- geoextent/lib/extent.py | 38 +- geoextent/lib/handleCSV.py | 31 +- geoextent/lib/handleRaster.py | 4 +- geoextent/lib/handleVector.py | 41 +- geoextent/lib/helpfunctions.py | 5 +- tests/help_functions_test.py | 14 +- tests/test_api.py | 58 +- tests/test_api_csv.py | 106 +- tests/test_api_geojson.py | 21 +- tests/test_api_geotiff.py | 8 +- tests/test_api_shapefile.py | 19 +- tests/test_cli.py | 122 +- tests/testdata/kml/TimeStamp_example.kml | 3293 ++++++++++++++++++++++ 14 files changed, 3584 insertions(+), 178 deletions(-) create mode 100644 tests/testdata/kml/TimeStamp_example.kml diff --git a/geoextent/__init__.py b/geoextent/__init__.py index 6294824..74c1fa2 100644 --- a/geoextent/__init__.py +++ b/geoextent/__init__.py @@ -1,3 +1,3 @@ name = "geoextent" -__version__ = '0.5.0' +__version__ = '0.5.1' diff --git a/geoextent/lib/extent.py b/geoextent/lib/extent.py index 921cd42..373ec2e 100644 --- a/geoextent/lib/extent.py +++ b/geoextent/lib/extent.py @@ -19,22 +19,20 @@ def computeBboxInWGS84(module, path): input "path": type string, path to file \n returns a bounding box, type list, length = 4 , type = float, schema = [min(longs), min(lats), max(longs), max(lats)], the boudning box has either its original crs or WGS84 (transformed). ''' - bbox_in_orig_crs = module.getBoundingBox(path) + spatial_extent_org = module.getBoundingBox(path) try: - # TODO: Add function using to reproject coordinates system - if module.fileType == "application/shp": - crs = 'None' - return bbox_in_orig_crs + + if spatial_extent_org['crs'] != str(hf.WGS84_EPSG_ID): + bbox_WGS84 = hf.transformingArrayIntoWGS84(spatial_extent_org['crs'], spatial_extent_org['bbox']) else: - crs = module.getCRS(path) + bbox_WGS84 = spatial_extent_org['bbox'] except: - pass - if 'crs' in locals() and crs and bbox_in_orig_crs: - bbox_transformed = hf.transformingArrayIntoWGS84(crs, bbox_in_orig_crs) - return bbox_transformed - else: raise Exception("The bounding box could not be related to a CRS") + spatial_extent = {'bbox': bbox_WGS84, 'crs': str(hf.WGS84_EPSG_ID)} + + return spatial_extent + def fromDirectory(path, bbox=False, tbox=False): ''' TODO: implement @@ -148,7 +146,9 @@ def run(self): if self.task == "bbox": try: if bbox: - metadata["bbox"] = computeBboxInWGS84(usedModule, filePath) + spatial_extent = computeBboxInWGS84(usedModule, filePath) + metadata["bbox"] = spatial_extent['bbox'] + metadata["crs"] = spatial_extent['crs'] except Exception as e: logger.warning("Error for {} extracting bbox:\n{}".format(filePath, str(e))) elif self.task == "tbox": @@ -163,17 +163,6 @@ def run(self): metadata["tbox"] = extract_tbox except Exception as e: logger.warning("Error extracting tbox, time format not found \n {}:".format(str(e))) - elif self.task == "crs": - try: - # the CRS is not necessarily required - if bbox and hasattr(usedModule, 'getCRS'): - metadata["crs"] = usedModule.getCRS(filePath) - elif tbox and hasattr(usedModule, 'getCRS'): - metadata["crs"] = usedModule.getCRS(filePath) - else: - logger.debug("The CRS cannot be extracted from the file {}".format(filePath)) - except Exception as e: - logger.warning("Error for {} extracting CRS:\n{}".format(filePath, str(e))) else: raise Exception("Unsupported thread task {}".format(self.task)) @@ -181,17 +170,14 @@ def run(self): thread_bbox_except = 
thread("bbox") thread_temp_except = thread("tbox") - thread_crs_except = thread("crs") logger.debug("Starting 3 threads for extraction.") thread_bbox_except.start() thread_temp_except.start() - thread_crs_except.start() thread_bbox_except.join() thread_temp_except.join() - thread_crs_except.join() logger.debug("Extraction finished: {}".format(str(metadata))) return metadata diff --git a/geoextent/lib/handleCSV.py b/geoextent/lib/handleCSV.py index 30b35da..d7d5492 100644 --- a/geoextent/lib/handleCSV.py +++ b/geoextent/lib/handleCSV.py @@ -52,32 +52,32 @@ def getBoundingBox(filePath): elements = [] for x in daten: elements.append(x) - - spatialExtent= [] - spatialLatExtent=[] - spatialLonExtent=[] - spatialLatExtent= hf.searchForParameters(elements, search['latitude'], exp_data= 'numeric') + spatialLatExtent = hf.searchForParameters(elements, search['latitude'], exp_data= 'numeric') - minlat= None - maxlat= None + minlat = None + maxlat = None if spatialLatExtent is None: pass else: - minlat= (min(spatialLatExtent)) - maxlat= (max(spatialLatExtent)) + minlat = (min(spatialLatExtent)) + maxlat = (max(spatialLatExtent)) - spatialLonExtent= hf.searchForParameters(elements, search['longitude'], exp_data= 'numeric') + spatialLonExtent = hf.searchForParameters(elements, search['longitude'], exp_data='numeric') if spatialLonExtent is None: raise Exception('The csv file from ' + filePath + ' has no BoundingBox') else: - minlon= (min(spatialLonExtent)) - maxlon= (max(spatialLonExtent)) - - spatialExtent= [minlon,minlat,maxlon,maxlat] - if not spatialExtent: + minlon = (min(spatialLonExtent)) + maxlon = (max(spatialLonExtent)) + + bbox = [minlon, minlat, maxlon, maxlat] + crs = getCRS(filePath) + spatialExtent = {"bbox": bbox, "crs": crs} + logger.debug(bbox) + if not bbox or not crs: raise Exception("Bounding box could not be extracted") + return spatialExtent def getTemporalExtent(filePath, num_sample): @@ -86,7 +86,6 @@ def getTemporalExtent(filePath, num_sample): returns temporal extent of the file: type list, length = 2, both entries have the type str, temporalExtent[0] <= temporalExtent[1] ''' - with open(filePath) as csv_file: # To get delimiter either comma or simecolon daten = hf.getDelimiter(csv_file) diff --git a/geoextent/lib/handleRaster.py b/geoextent/lib/handleRaster.py index b9affed..f3e7af8 100644 --- a/geoextent/lib/handleRaster.py +++ b/geoextent/lib/handleRaster.py @@ -75,7 +75,9 @@ def getBoundingBox(filePath): if old_cs.GetAxisMappingStrategy() == 1: bbox = [latlongmin[1], latlongmin[0], latlongmax[1], latlongmax[0]] - return bbox + spatialExtent = {"bbox": bbox, "crs": str(hf.WGS84_EPSG_ID)} + + return spatialExtent def getCRS(filePath): diff --git a/geoextent/lib/handleVector.py b/geoextent/lib/handleVector.py index d88e235..8963909 100644 --- a/geoextent/lib/handleVector.py +++ b/geoextent/lib/handleVector.py @@ -8,7 +8,7 @@ null_island = [0] * 4 fileType = "application/shp" -search = {"time": ["(.)*timestamp(.)*", "(.)*datetime(.)*", "(.)*time(.)*", "date$", "^date","^begin"]} +search = {"time": ["(.)*timestamp(.)*", "(.)*datetime(.)*", "(.)*time(.)*", "date$", "^date", "^begin"]} logger = logging.getLogger("geoextent") @@ -38,19 +38,6 @@ def checkFileSupported(filepath): return False -def getCRS(filepath): - dataset = ogr.Open(filepath) - layer = dataset.GetLayer() - - try: - spatialRef = layer.GetSpatialRef().GetAttrValue("GEOGCS|AUTHORITY", 1) - except: - logger.debug("File {} does not have a coordinate reference system !".format(filepath)) - spatialRef = None - - return 
spatialRef - - def getTemporalExtent(filepath): ''' extracts temporal extent of the vector file \n input "path": type string, file path to vector file @@ -58,13 +45,14 @@ def getTemporalExtent(filepath): datasource = ogr.Open(filepath) layer_count = datasource.GetLayerCount() - logger.debug("{} contains {} layers".format(filepath,layer_count)) + logger.debug("{} contains {} layers".format(filepath, layer_count)) datetime_list = [] for layer in datasource: logger.debug("{} : Extracting temporal extent from layer {} ".format(filepath, layer)) layerDefinition = layer.GetLayerDefn() field_names = [] + logger.debug(layerDefinition.GetFieldCount()) for i in range(layerDefinition.GetFieldCount()): field_names.append(layerDefinition.GetFieldDefn(i).GetName()) @@ -75,31 +63,35 @@ def getTemporalExtent(filepath): match = term.search(j) if match is not None: match_list.append(j) - logger.debug(match_list) + logger.debug("Features name match: {}".format(match_list)) if len(match_list) == 0: - logger.debug("File:{} /Layer: {}: No matched fields for temporal extent".format(filepath,layer)) + logger.debug("File:{} /Layer: {}: No matched fields for temporal extent".format(filepath, layer)) pass else: datetime_list = [] for time_feature in match_list: time_list = [] - logger.debug(time_feature) + logger.debug("Time feature: {}".format(time_feature)) for feat in layer: time = feat.GetField(time_feature) + logger.debug("time {}".format(time)) if time is not None: time_list.append(time) layer.ResetReading() if len(time_list) != 0: + logger.debug("Time_list: {}".format(time_list)) parsed_time = hf.date_parser(time_list) if parsed_time is not None: datetime_list.extend(parsed_time) else: - logger.debug('File:{} /Layer: {}: : Matched temporal extent "{}" field do not have recognizable time format'.format(filepath,layer,time_feature)) + logger.debug('File:{} /Layer: {}: : Matched temporal extent "{}"' + 'field do not have recognizable time format'.format(filepath, layer, time_feature)) pass else: - logger.debug("File:{} / Layer: {}: No values found in {} fields.".format(filepath,layer, time_feature)) + logger.debug("File:{} / Layer: {}: No values found in {} fields." 
+ .format(filepath, layer, time_feature)) pass logger.debug(match_list) @@ -119,6 +111,7 @@ def getBoundingBox(filepath): """ datasource = ogr.Open(filepath) geo_dict = {} + for layer in datasource: layer_name = layer.GetDescription() ext = layer.GetExtent() @@ -141,7 +134,11 @@ def getBoundingBox(filepath): bbox_merge = hf.bbox_merge(geo_dict, filepath) + if bbox_merge is not None: + crs = str(hf.WGS84_EPSG_ID) + else: + crs = None + spatialExtent = {"bbox": bbox_merge, "crs": crs} - - return bbox_merge + return spatialExtent diff --git a/geoextent/lib/helpfunctions.py b/geoextent/lib/helpfunctions.py index 34fa9b3..77b09d3 100644 --- a/geoextent/lib/helpfunctions.py +++ b/geoextent/lib/helpfunctions.py @@ -38,8 +38,8 @@ def getAllRowElements(rowname, elements, exp_data=None): elif exp_data == 'numeric': try: - list(map(float, values)) - return values + values_num = list(map(float, values)) + return values_num except: return None @@ -92,6 +92,7 @@ def transformingIntoWGS84(crs, coordinate): point = ogr.CreateGeometryFromWkt(point) point.Transform(transform) + return [point.GetX(), point.GetY()] diff --git a/tests/help_functions_test.py b/tests/help_functions_test.py index a9d9d57..0af6666 100644 --- a/tests/help_functions_test.py +++ b/tests/help_functions_test.py @@ -2,15 +2,18 @@ import os tolerance = 1e-3 + + def create_zip(folder, zipfile_temp): - ''' + """ Function purpose: create a zip file Input: filepath Source: https://thispointer.com/python-how-to-create-a-zip-archive-from-multiple-files-or-directory/ - ''' + """ + with zipfile.ZipFile(zipfile_temp, "w") as zipObj: # Iterate over all the files in directory - for folderName, subfolders, filenames in os.walk(folder): + for folderName, sub_folders, filenames in os.walk(folder): for filename in filenames: # create complete filepath of file in directory file_root = os.path.abspath(folderName) @@ -19,6 +22,11 @@ def create_zip(folder, zipfile_temp): def parse_coordinates(result): + """ + Function purpose: parse coordinates from console result into a list + Input: string + Output: list + """ bboxStr = result[result.find("[") + 1:result.find("]")] bboxList = [float(i) for i in bboxStr.split(',')] return bboxList diff --git a/tests/test_api.py b/tests/test_api.py index d5cde0b..259ee17 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -4,7 +4,7 @@ import pytest import geoextent.lib.extent as geoextent from help_functions_test import create_zip, tolerance -import geoextent.__main__ as geoextent_main + @pytest.mark.skip(reason="file format not implemented yet") def test_defaults(): @@ -32,12 +32,23 @@ def test_netcdf_extract_bbox(): def test_kml_extract_bbox(): result = geoextent.fromFile("tests/testdata/kml/aasee.kml", bbox=True) assert "bbox" in result + assert "crs" in result assert result["bbox"] == pytest.approx([7.594213, 51.942465, 7.618246, 51.957278], abs=tolerance) + assert result["crs"] == "4326" + + +def test_kml_extract_tbox(): + result = geoextent.fromFile("tests/testdata/kml/TimeStamp_example.kml", bbox=True) + assert "tbox" in result + assert result["tbox"] == ['2007-01-14', '2007-01-14'] def test_gpkg_extract_bbox(): result = geoextent.fromFile("tests/testdata/nc/nc.gpkg", bbox=True) + assert "bbox" in result + assert "crs" in result assert result['bbox'] == pytest.approx([-84.323835, 33.882102, -75.456585, 36.589757], abs=tolerance) + assert result["crs"] == "4326" @pytest.mark.skipif("TRAVIS" in os.environ and os.environ["TRAVIS"] == "true", @@ -45,13 +56,17 @@ def test_gpkg_extract_bbox(): def 
test_gml_extract_bbox(): result = geoextent.fromFile("tests/testdata/gml/clc_1000_PT.gml", bbox=True) assert "bbox" in result + assert "crs" in result assert result["bbox"] == pytest.approx([-17.542069, 32.39669, -6.959389, 39.301139], abs=tolerance) + assert result["crs"] == "4326" def test_gpx_extract_bbox(): result = geoextent.fromFile("tests/testdata/gpx/gpx1.1_with_all_fields.gpx", bbox=True) assert "bbox" in result + assert "crs" in result assert result["bbox"] == pytest.approx([-20.2, 10.0, 46.7, 14.0], abs=tolerance) + assert result["crs"] == "4326" def test_gpx_extract_tbox(): @@ -59,39 +74,50 @@ def test_gpx_extract_tbox(): assert "tbox" in result assert result["tbox"] == ['2013-01-01', '2013-01-01'] -@pytest.mark.skipif(sys.platform == "darwin",reason="MacOS recognize file") + +@pytest.mark.skipif(sys.platform == "darwin", reason="MacOS recognize file") def test_gpx_format_error_file(): result = geoextent.fromFile("tests/testdata/gpx/gpx1.1_with_all_fields_error_format.gpx", tbox=True) - assert result == None + assert result is None -def test_empty_folder(): +def test_empty_folder(): with tempfile.TemporaryDirectory() as temp: result = geoextent.fromDirectory(temp, bbox=True, tbox=True) assert "bbox" not in result assert "tbox" not in result + def test_folder_one_file(): result = geoextent.fromDirectory('tests/testdata/folders/folder_one_file', bbox=True, tbox=True) assert "bbox" in result assert "tbox" in result + assert "crs" in result assert result["bbox"] == pytest.approx([7.601680, 51.948814, 7.647256, 51.974624], abs=tolerance) + assert result["crs"] == "4326" assert result["tbox"] == ['2018-11-14', '2018-11-14'] + def test_folder_multiple_files(): result = geoextent.fromDirectory('tests/testdata/folders/folder_two_files', bbox=True, tbox=True) assert "bbox" in result assert "tbox" in result + assert "crs" in result assert result["bbox"] == pytest.approx([2.052333, 41.317038, 7.647256, 51.974624], abs=tolerance) + assert result["crs"] == "4326" assert result["tbox"] == ['2018-11-14', '2019-09-11'] + def test_folder_nested_files(): result = geoextent.fromDirectory('tests/testdata/folders/nested_folder', bbox=True, tbox=True) assert "bbox" in result assert "tbox" in result + assert "crs" in result assert result["bbox"] == pytest.approx([7.601680, 34.7, 142.0, 51.974624], abs=tolerance) + assert result["crs"] == "4326" assert result["tbox"] == ['2017-04-08', '2020-02-06'] + def test_zipfile_unsupported_file(): with tempfile.TemporaryDirectory() as tmp_dir: f = open(tmp_dir + "/unsupported_file.txt", "a") @@ -103,47 +129,47 @@ def test_zipfile_unsupported_file(): assert "bbox" not in result assert "tbox" not in result + def test_zipfile_one_file(): folder_name = "tests/testdata/folders/folder_one_file" with tempfile.NamedTemporaryFile(suffix=".zip") as tmp: - create_zip(folder_name,tmp) + create_zip(folder_name, tmp) result = geoextent.fromDirectory(tmp.name, bbox=True, tbox=True) assert result["bbox"] == pytest.approx([7.601680, 51.948814, 7.647256, 51.974624], abs=tolerance) + assert result["crs"] == "4326" assert result["tbox"] == ['2018-11-14', '2018-11-14'] + def test_zipfile_nested_folders(): folder_name = "tests/testdata/folders/nested_folder" with tempfile.NamedTemporaryFile(suffix=".zip") as tmp: - create_zip(folder_name,tmp) + create_zip(folder_name, tmp) result = geoextent.fromDirectory(tmp.name, bbox=True, tbox=True) assert result["bbox"] == pytest.approx([7.601680, 34.7, 142.0, 51.974624], abs=tolerance) + assert result["crs"] == "4326" assert result["tbox"] == 
['2017-04-08', '2020-02-06'] + @pytest.mark.skip(reason="file format not implemented yet") def test_netcdf_extract_time(): assert geoextent.fromFile("tests/testdata/nc/ECMWF_ERA-40_subset.nc", tbox=True) == ['2002-07-01', '2002-07-31'] -@pytest.mark.skipif("TRAVIS" in os.environ and os.environ["TRAVIS"] == "true", - reason="Travis GDAL version outdated") def test_gml_extract_time(): result = geoextent.fromFile("tests/testdata/gml/clc_1000_PT.gml", tbox=True) assert result["tbox"] == ['2005-12-31', '2013-11-30'] + @pytest.mark.skip(reason="file format not implemented yet") def test_netcdf_extract_bbox_time(): assert geoextent.fromFile("tests/testdata/nc/ECMWF_ERA-40_subset.nc", bbox=True, tbox=True) == [ [-90.0, 0.0, 90.0, 357.5], ['2002-07-01', '2002-07-31']] -def test_kml_extract_bbox(): - result = geoextent.fromFile("tests/testdata/kml/aasee.kml", bbox=True) - assert result['bbox'] == pytest.approx([7.594213, 51.942465, 7.618246, 51.957278], abs=tolerance) - - def test_gpkg_extract_bboxs(): result = geoextent.fromFile("tests/testdata/geopackage/nc.gpkg", bbox=True) assert result['bbox'] == pytest.approx([-84.323835, 33.882102, -75.456585, 36.589757], abs=tolerance) + assert result["crs"] == "4326" @pytest.mark.skipif("TRAVIS" in os.environ and os.environ["TRAVIS"] == "true", @@ -151,15 +177,19 @@ def test_gpkg_extract_bboxs(): def test_gml_extract_bbox_time(): result = geoextent.fromFile("tests/testdata/gml/clc_1000_PT.gml", bbox=True, tbox=True) assert result['bbox'] == pytest.approx([-17.542069, 32.39669, -6.959389, 39.301139], abs=tolerance) + assert result["crs"] == "4326" assert result['tbox'] == ['2005-12-31', '2013-11-30'] + def test_not_found_file(): result = geoextent.fromFile('tests/testdata/empt.geojson', bbox=True) assert result is None + def test_not_supported_file_format(): result = geoextent.fromFile('tests/testdata/geojson/empty.geo', bbox=True) - assert result == None + assert result is None + def test_bbox_and_tbox_both_false(): with pytest.raises(Exception) as excinfo: diff --git a/tests/test_api_csv.py b/tests/test_api_csv.py index d8f6442..3a0fd0f 100644 --- a/tests/test_api_csv.py +++ b/tests/test_api_csv.py @@ -1,14 +1,16 @@ -import os -import sys import pytest from help_functions_test import tolerance import geoextent.lib.extent as geoextent + def test_csv_extract_bbox(): result = geoextent.fromFile('tests/testdata/csv/cities_NL_lat&long.csv', bbox=True) assert "bbox" in result + assert "crs" in result assert "tbox" not in result assert result["bbox"] == pytest.approx([4.3175, 51.434444, 6.574722, 53.217222], abs=tolerance) + assert result["crs"] == "4326" + def test_csv_extract_tbox(): result = geoextent.fromFile('tests/testdata/csv/cities_NL.csv', bbox=False, tbox=True) @@ -16,150 +18,204 @@ def test_csv_extract_tbox(): assert "tbox" in result assert result["tbox"] == ['2017-08-01', '2019-09-30'] + def test_csv_extract_bbox_and_tbox(): result = geoextent.fromFile('tests/testdata/csv/cities_NL.csv', bbox=True, tbox=True) assert "bbox" in result assert "tbox" in result + assert "crs" in result assert result["bbox"] == pytest.approx([4.3175, 51.434444, 6.574722, 53.217222], abs=tolerance) assert result["tbox"] == ['2017-08-01', '2019-09-30'] + assert result["crs"] == "4326" + def test_empty_csv_file(): result = geoextent.fromFile('tests/testdata/csv/empty_csv.csv', bbox=True) assert result is None + def test_csv_extract_bbox_and_tbox_semicolon_delimiter(): result = geoextent.fromFile('tests/testdata/csv/csv_semicolon_delimiter.csv', bbox=True, tbox=True) assert 
"bbox" in result assert "tbox" in result + assert "crs" in result assert result["bbox"] == pytest.approx([4.3175, 51.434444, 6.574722, 53.217222], abs=tolerance) + assert result["crs"] == "4326" assert result["tbox"] == ['2017-08-01', '2019-09-30'] + def test_csv_extract_bbox_and_tbox_comma_delimiter(): result = geoextent.fromFile('tests/testdata/csv/csv_comma_delimiter.csv', bbox=True, tbox=True) assert "bbox" in result assert "tbox" in result + assert "crs" in result assert result["bbox"] == pytest.approx([4.3175, 51.434444, 6.574722, 53.217222], abs=tolerance) + assert result["crs"] == "4326" assert result["tbox"] == ['2017-08-01', '2019-09-30'] -def test_csv_extract_bbox_and_tbox_with_Time(): + +def test_csv_extract_bbox_and_tbox_with_time(): result = geoextent.fromFile('tests/testdata/csv/cities_NL_Time.csv', bbox=True, tbox=True) assert "bbox" in result assert "tbox" in result + assert "crs" in result assert result["bbox"] == pytest.approx([4.3175, 51.434444, 6.574722, 53.217222], abs=tolerance) + assert result["crs"] == "4326" assert result["tbox"] == ['2017-08-01', '2019-09-30'] -def test_csv_extract_bbox_and_tbox_with_TimeStamp(): + +def test_csv_extract_bbox_and_tbox_with_timestamp(): result = geoextent.fromFile('tests/testdata/csv/cities_NL_TimeStamp.csv', bbox=True, tbox=True) assert "bbox" in result assert "tbox" in result + assert "crs" in result assert result["bbox"] == pytest.approx([4.3175, 51.434444, 6.574722, 53.217222], abs=tolerance) + assert result["crs"] == "4326" assert result["tbox"] == ['2017-08-01', '2019-09-30'] -def test_csv_extract_bbox_and_tbox_with_TIME(): - result = geoextent.fromFile('tests/testdata/csv/cities_NL_TIME.csv', bbox=True, tbox=True) - assert "bbox" in result - assert "tbox" in result - assert result["bbox"] == pytest.approx([4.3175, 51.434444, 6.574722, 53.217222], abs=tolerance) - assert result["tbox"] == ['2017-08-01', '2019-09-30'] -def test_csv_extract_bbox_and_tbox_with_Datetime(): +def test_csv_extract_bbox_and_tbox_with_datetime(): result = geoextent.fromFile('tests/testdata/csv/cities_NL_Datetime.csv', bbox=True, tbox=True) assert "bbox" in result + assert "crs" in result assert "tbox" in result assert result["bbox"] == pytest.approx([4.3175, 51.434444, 6.574722, 53.217222], abs=tolerance) + assert result["crs"] == "4326" assert result["tbox"] == ['2017-08-01', '2019-09-30'] -def test_csv_extract_bbox_and_tbox_with_LATITUDE_LONGITUDE(): + +def test_csv_extract_bbox_and_tbox_with_lat_long(): result = geoextent.fromFile('tests/testdata/csv/cities_NL_LATITUDE.csv', bbox=True, tbox=True) assert "bbox" in result + assert "crs" in result assert "tbox" in result assert result["bbox"] == pytest.approx([4.3175, 51.434444, 6.574722, 53.217222], abs=tolerance) + assert result["crs"] == "4326" assert result["tbox"] == ['2017-08-01', '2019-09-30'] -def test_csv_extract_bbox_and_tbox_with_LAT_LONG(): + +def test_csv_extract_bbox_and_tbox_with_lat_long_2(): result = geoextent.fromFile('tests/testdata/csv/cities_NL_LAT.csv', bbox=True, tbox=True) assert "bbox" in result + assert "crs" in result assert "tbox" in result assert result["bbox"] == pytest.approx([4.3175, 51.434444, 6.574722, 53.217222], abs=tolerance) + assert result["crs"] == "4326" assert result["tbox"] == ['2017-08-01', '2019-09-30'] -def test_csv_extract_bbox_and_tbox_with_TIME_DATE(): + +def test_csv_extract_bbox_and_tbox_with_time_date(): result = geoextent.fromFile('tests/testdata/csv/cities_NL_TIME_DATE.csv', bbox=True, tbox=True) assert "bbox" in result + assert "crs" in result 
assert "tbox" in result assert result["bbox"] == pytest.approx([4.3175, 51.434444, 6.574722, 53.217222], abs=tolerance) + assert result["crs"] == "4326" assert result["tbox"] == ['2010-09-01', '2019-09-30'] + def test_csv_extract_bbox_and_tbox_with_cols_diff_order(): result = geoextent.fromFile('tests/testdata/csv/cities_NL_case1.csv', bbox=True, tbox=True) assert "bbox" in result + assert "crs" in result assert "tbox" in result assert result["bbox"] == pytest.approx([4.3175, 51.434444, 6.574722, 53.217222], abs=tolerance) + assert result["crs"] == "4326" assert result["tbox"] == ['2017-08-01', '2019-09-30'] -def test_csv_extract_bbox_and_tbox_with_cols_diff_order_capitalisations(): + +def test_csv_extract_bbox_and_tbox_with_cols_diff_order_caps(): result = geoextent.fromFile('tests/testdata/csv/cities_NL_case2.csv', bbox=True, tbox=True) assert "bbox" in result + assert "crs" in result assert "tbox" in result assert result["bbox"] == pytest.approx([4.3175, 51.434444, 6.574722, 53.217222], abs=tolerance) + assert result["crs"] == "4326" assert result["tbox"] == ['2017-08-01', '2019-09-30'] + def test_csv_extract_bbox_and_tbox_with_cols_diff_order_and_alt_names1(): result = geoextent.fromFile('tests/testdata/csv/cities_NL_case3.csv', bbox=True, tbox=True) assert "bbox" in result + assert "crs" in result assert "tbox" in result assert result["bbox"] == pytest.approx([4.3175, 51.434444, 6.574722, 53.217222], abs=tolerance) + assert result["crs"] == "4326" assert result["tbox"] == ['2017-08-01', '2019-09-30'] + def test_csv_extract_bbox_and_tbox_with_cols_diff_order_and_alt_names2(): result = geoextent.fromFile('tests/testdata/csv/cities_NL_case4.csv', bbox=True, tbox=True) assert "bbox" in result + assert "crs" in result assert "tbox" in result assert result["bbox"] == pytest.approx([4.3175, 51.434444, 6.574722, 53.217222], abs=tolerance) + assert result["crs"] == "4326" assert result["tbox"] == ['2017-08-01', '2019-09-30'] + def test_csv_extract_bbox_and_tbox_with_cols_diff_order_and_alt_names3(): result = geoextent.fromFile('tests/testdata/csv/cities_NL_case5.csv', bbox=True, tbox=True) assert "bbox" in result + assert "crs" in result assert "tbox" in result assert result["bbox"] == pytest.approx([4.3175, 51.434444, 6.574722, 53.217222], abs=tolerance) assert result["tbox"] == ['2017-08-01', '2019-09-30'] + assert result["crs"] == "4326" + -def test_csv_extract_tbox_ISO8601_time_format(): - result = geoextent.fromFile('tests/testdata/csv/3DCMTcatalog_TakemuraEPS.csv', bbox=False, tbox = True) +def test_csv_extract_tbox_iso8601_time_format(): + result = geoextent.fromFile('tests/testdata/csv/3DCMTcatalog_TakemuraEPS.csv', bbox=False, tbox=True) assert "bbox" not in result + assert "crs" not in result assert "tbox" in result assert result["tbox"] == ['2017-04-08', '2020-02-06'] -def test_csv_extract_tbox_DD_MM_YYYY_time_format(): - result = geoextent.fromFile('tests/testdata/csv/3DCMTcatalog_TakemuraEPS_dd_mm_yyyy.csv', bbox=False, tbox = True) + +def test_csv_extract_tbox_dd_mm_yyyy_time_format(): + result = geoextent.fromFile('tests/testdata/csv/3DCMTcatalog_TakemuraEPS_dd_mm_yyyy.csv', bbox=False, tbox=True) assert "bbox" not in result + assert "crs" not in result assert "tbox" in result assert result["tbox"] == ['2017-04-19', '2018-01-31'] + def test_csv_extract_tbox_month_abbr_dd_yyyy_time_formats(): - result = geoextent.fromFile('tests/testdata/csv/3DCMTcatalog_TakemuraEPS_month_abbr_dd_yyyy_time_format.csv', bbox=False, tbox = True) + result = 
geoextent.fromFile('tests/testdata/csv/3DCMTcatalog_TakemuraEPS_month_abbr_dd_yyyy_time_format.csv', + bbox=False, tbox=True) assert "bbox" not in result + assert "crs" not in result assert result["tbox"] == ['2017-04-09', '2017-07-20'] + def test_csv_extract_tbox_two_diff_time_formats(): - result = geoextent.fromFile('tests/testdata/csv/3DCMTcatalog_TakemuraEPS_mixed_time_formats.csv', bbox=False, tbox = True) + result = geoextent.fromFile('tests/testdata/csv/3DCMTcatalog_TakemuraEPS_mixed_time_formats.csv', + bbox=False, tbox=True) assert "bbox" not in result assert "tbox" not in result + assert "crs" not in result + def test_csv_extract_tbox_random_sample(): - result = geoextent.fromFile('tests/testdata/csv/3DCMTcatalog_TakemuraEPS.csv', bbox=False, tbox=True , num_sample = 5) + result = geoextent.fromFile('tests/testdata/csv/3DCMTcatalog_TakemuraEPS.csv', bbox=False, tbox=True, num_sample=5) assert "bbox" not in result + assert "crs" not in result assert "tbox" in result assert result["tbox"] == ['2017-04-08', '2020-02-06'] + def test_csv_extract_tbox_random_sample_invalid(): - result = geoextent.fromFile('tests/testdata/csv/3DCMTcatalog_TakemuraEPS.csv', bbox=False, tbox=True , num_sample = -1) + result = geoextent.fromFile('tests/testdata/csv/3DCMTcatalog_TakemuraEPS.csv', + bbox=False, tbox=True, num_sample=-1) assert "bbox" not in result + assert "crs" not in result assert "tbox" not in result + def test_csv_extract_tbox_random_sample_value_larger_than_data(): - result = geoextent.fromFile('tests/testdata/csv/3DCMTcatalog_TakemuraEPS.csv', bbox=False, tbox=True , num_sample = 1000000) + result = geoextent.fromFile('tests/testdata/csv/3DCMTcatalog_TakemuraEPS.csv', bbox=False, tbox=True, + num_sample=1000000) assert "bbox" not in result + assert "crs" not in result assert "tbox" in result - assert result["tbox"] == ['2017-04-08', '2020-02-06'] \ No newline at end of file + assert result["tbox"] == ['2017-04-08', '2020-02-06'] diff --git a/tests/test_api_geojson.py b/tests/test_api_geojson.py index ddcb7ab..3782ddb 100644 --- a/tests/test_api_geojson.py +++ b/tests/test_api_geojson.py @@ -1,30 +1,39 @@ -import os # used to get the location of the testdata -import sys import pytest from help_functions_test import tolerance import geoextent.lib.extent as geoextent + def test_geojson_extract_bbox(): result = geoextent.fromFile('tests/testdata/geojson/muenster_ring_zeit.geojson', bbox=True) - assert result["bbox"] == pytest.approx([7.601680, 51.948814, 7.647256, 51.974624],abs=tolerance) + assert "bbox" in result + assert "crs" in result + assert result["bbox"] == pytest.approx([7.601680, 51.948814, 7.647256, 51.974624], abs=tolerance) + assert result["crs"] == "4326" + def test_invalid_coordinate_geojson_extract_bbox(): result = geoextent.fromFile('tests/testdata/geojson/invalid_coordinate.geojson', bbox=True) - assert result["bbox"] is None + assert "bbox" not in result + assert "crs" not in result + def test_one_point_geojson_extract_bbox(): result = geoextent.fromFile('tests/testdata/geojson/onePoint.geojson', bbox=True) - assert result["bbox"] == pytest.approx([6.220493, 50.521503, 6.220493, 50.521503],abs=tolerance) + assert result["bbox"] == pytest.approx([6.220493, 50.521503, 6.220493, 50.521503], abs=tolerance) + assert result["crs"] == "4326" + def test_empty_file_geojson_extract_bbox(): result = geoextent.fromFile('tests/testdata/geojson/empty.geojson', bbox=True) assert result is None + def test_geojson_extract_time(): result = 
geoextent.fromFile('tests/testdata/geojson/muenster_ring_zeit.geojson', tbox=True) assert result["tbox"] == ['2018-11-14', '2018-11-14'] + def test_geojson_extract_only_time(): result = geoextent.fromFile('tests/testdata/geojson/muenster_ring_zeit.geojson', bbox=False, tbox=True) assert "bbox" not in result - assert result["tbox"] == ['2018-11-14', '2018-11-14'] \ No newline at end of file + assert result["tbox"] == ['2018-11-14', '2018-11-14'] diff --git a/tests/test_api_geotiff.py b/tests/test_api_geotiff.py index 586028a..7652682 100644 --- a/tests/test_api_geotiff.py +++ b/tests/test_api_geotiff.py @@ -1,17 +1,21 @@ import geoextent.lib.extent as geoextent from help_functions_test import tolerance import pytest -from osgeo import gdal + def test_geotiff_extract_bbox(): result = geoextent.fromFile('tests/testdata/tif/wf_100m_klas.tif', bbox=True) assert "bbox" in result + assert "crs" in result assert result["bbox"] == pytest.approx([5.915300, 50.310251, 9.468398, 52.530775], abs=tolerance) + assert result["crs"] == "4326" + def test_geotiff_extract_time(): - result = geoextent.fromFile('tests/testdata/tif/wf_100m_klas.tif', bbox=True) + result = geoextent.fromFile('tests/testdata/tif/wf_100m_klas.tif', tbox=True) assert "temporal_extent" not in result + def test_geotiff_crs_used(): result = geoextent.fromFile('tests/testdata/tif/wf_100m_klas.tif', bbox=True) assert "crs" in result diff --git a/tests/test_api_shapefile.py b/tests/test_api_shapefile.py index ccde1a5..509ba51 100644 --- a/tests/test_api_shapefile.py +++ b/tests/test_api_shapefile.py @@ -2,21 +2,28 @@ from help_functions_test import tolerance import geoextent.lib.extent as geoextent -def test_shapefile_withCRS_extract_bbox(): + +def test_shapefile_with_crs_extract_bbox(): result = geoextent.fromFile('tests/testdata/shapefile/gis_osm_buildings_a_free_1.shp', bbox=True, tbox=False) assert "temporal_extent" not in result assert result["bbox"] == pytest.approx([-167.400123, -89.998844, 166.700078, -60.708069], abs=tolerance) + assert result["crs"] == "4326" + -def test_shapefile_without_CRS_extract_bbox(): +def test_shapefile_without_crs_extract_bbox(): result = geoextent.fromFile('tests/testdata/shapefile/Abgrabungen_Kreis_Kleve_Shape.shp', bbox=True, tbox=False) - assert "tbox " not in result - assert result["bbox"] is None + assert "tbox" not in result + assert "bbox" not in result + assert "crs" not in result -def test_shapefile_extract_bbox_with_CRS(): + +def test_shapefile_extract_bbox_with_crs(): result = geoextent.fromFile('tests/testdata/shapefile/gis_osm_buildings_a_free_1.shp', bbox=True, tbox=False) assert "temporal_extent" not in result assert result["bbox"] == pytest.approx([-167.400123, -89.998844, 166.700078, -60.708069], abs=tolerance) - + assert result["crs"] == "4326" + + def test_shapefile_extract_time(): result = geoextent.fromFile('tests/testdata/shapefile/Abgrabungen_Kreis_Kleve_Shape.shp', bbox=False, tbox=True) assert "bbox" not in result diff --git a/tests/test_cli.py b/tests/test_cli.py index 6f6f767..223a55d 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,17 +1,18 @@ -import os # used to get the location of the testdata +import os # used to get the location of the testdata import pytest import tempfile from help_functions_test import create_zip, parse_coordinates, tolerance from osgeo import gdal -def test_helptext_direct(script_runner): + +def test_help_text_direct(script_runner): ret = script_runner.run('geoextent', '--help') assert ret.success, "process should return success" assert 
ret.stderr == '', "stderr should be empty" assert "geoextent [-h]" in ret.stdout, "usage instructions are printed to console" -def test_helptext_no_args(script_runner): +def test_help_text_no_args(script_runner): ret = script_runner.run('geoextent') assert ret.success, "process should return success" assert ret.stderr == '', "stderr should be empty" @@ -20,7 +21,7 @@ def test_helptext_no_args(script_runner): def test_error_no_file(script_runner): ret = script_runner.run('geoextent', 'doesntexist') - assert not ret.success, "process should return failue" + assert not ret.success, "process should return failure" assert ret.stderr != '', "stderr should not be empty" assert 'doesntexist' in ret.stderr, "wrong input is printed to console" assert ret.stdout == '' @@ -28,7 +29,7 @@ def test_error_no_file(script_runner): def test_error_no_option(script_runner): ret = script_runner.run('geoextent', 'README.md') - assert not ret.success, "process should return failue" + assert not ret.success, "process should return failure" assert ret.stderr != '', "stderr should not be empty" assert 'one of extraction options' in ret.stderr assert ret.stdout == '' @@ -36,7 +37,7 @@ def test_error_no_option(script_runner): def test_debug_output(script_runner): ret = script_runner.run('geoextent', - '-b', 'tests/testdata/geojson/muenster_ring_zeit.geojson') + '-b', 'tests/testdata/geojson/muenster_ring_zeit.geojson') assert ret.success, "process should return success" assert "DEBUG:geoextent" not in ret.stderr assert "INFO:geoextent" not in ret.stderr @@ -44,16 +45,17 @@ def test_debug_output(script_runner): assert "INFO:geoextent" not in ret.stdout # FIXME - #retd = script_runner.run('geoextent', + # ret = script_runner.run('geoextent', # '--debug', # '-b', # 'tests/testdata/geojson/muenster_ring_zeit.geojson') - #assert retd.success, "process should return success" - #assert "DEBUG:geoextent" in retd.stdout - #assert "geoextent" not in retd.stdout + # assert ret.success, "process should return success" + # assert "DEBUG:geoextent" in ret.stdout + # assert "geoextent" not in ret.stdout + # FIXME -#def test_debug_config_envvar(script_runner): +# def test_debug_config_env_var(script_runner): # os.environ["GEOEXTENT_DEBUG"] = "1" # this is picked up by the library, BUT the stdout is empty still # ret = script_runner.run('geoextent', '-b', 'tests/testdata/geojson/muenster_ring_zeit.geojson') # print(str(ret)) @@ -61,11 +63,12 @@ def test_debug_output(script_runner): # assert "DEBUG:geoextent" in ret.stdout # os.environ["GEOEXTENT_DEBUG"] = None + def test_geojson_invalid_second_input(script_runner): ret = script_runner.run('geoextent', - 'tests/testdata/geojson/muenster_ring_zeit.geojson', - 'tests/testdata/geojson/not_existing.geojson') - assert not ret.success, "process should return failue" + 'tests/testdata/geojson/muenster_ring_zeit.geojson', + 'tests/testdata/geojson/not_existing.geojson') + assert not ret.success, "process should return failure" assert ret.stderr != '', "stderr should not be empty" assert 'not a valid directory or file' in ret.stderr, "wrong input is printed to console" assert 'tests/testdata/geojson/not_existing.geojson' in ret.stderr, "wrong input is printed to console" @@ -73,40 +76,42 @@ def test_geojson_invalid_second_input(script_runner): def test_geojson_bbox(script_runner): - ret = script_runner.run('geoextent', '-b','tests/testdata/geojson/muenster_ring_zeit.geojson') + ret = script_runner.run('geoextent', '-b', 'tests/testdata/geojson/muenster_ring_zeit.geojson') assert ret.success, 
"process should return success" result = ret.stdout bboxList = parse_coordinates(result) assert bboxList == pytest.approx([7.601680, 51.948814, 7.647256, 51.974624], abs=tolerance) + assert "4326" in result def test_geojson_bbox_long_name(script_runner): ret = script_runner.run('geoextent', - '--bounding-box', 'tests/testdata/geojson/muenster_ring_zeit.geojson') + '--bounding-box', 'tests/testdata/geojson/muenster_ring_zeit.geojson') assert ret.success, "process should return success" result = ret.stdout bboxList = parse_coordinates(result) assert bboxList == pytest.approx([7.601680, 51.948814, 7.6472568, 51.974624], abs=tolerance) + assert "4326" in result def test_geojson_bbox_invalid_coordinates(script_runner): ret = script_runner.run('geoextent', - '-b', 'tests/testdata/gejson/invalid_coordinate.geojson') - assert not ret.success, "process should return success" + '-b', 'tests/testdata/geojson/invalid_coordinate.geojson') + assert ret.success, "process should return success" assert ret.stderr is not None - assert 'not a valid directory or file' in ret.stderr, "stderr should not be empty" + assert 'bbox' not in ret.stdout, "stderr should not be empty" def test_geojson_time(script_runner): ret = script_runner.run('geoextent', - '-t', 'tests/testdata/geojson/muenster_ring_zeit.geojson') + '-t', 'tests/testdata/geojson/muenster_ring_zeit.geojson') assert ret.success, "process should return success" - assert "['2018-11-14', '2018-11-14']" in ret.stdout, "time value is printed to console" + assert "['2018-11-14', '2018-11-14']" in ret.stdout, "time value is printed to console" def test_geojson_time_invalid(script_runner): ret = script_runner.run('geoextent', - '-t', 'tests/testdata/geojson/invalid_time.geojson') + '-t', 'tests/testdata/geojson/invalid_time.geojson') assert ret.success, "process should return success" assert "'tbox': None" in ret.stdout @@ -115,32 +120,34 @@ def test_print_supported_formats(script_runner): ret = script_runner.run('geoextent', '--formats') assert ret.success, "process should return success" assert ret.stderr == '', "stderr should be empty" - assert "Supported formats:" in ret.stdout, "list of supported formats is printed to console" + assert "Supported formats:" in ret.stdout, "list of supported formats is printed to console" @pytest.mark.skip(reason="file format not implemented yet") def test_netcdf_bbox(script_runner): ret = script_runner.run('geoextent', - '-b', 'tests/testdata/nc/ECMWF_ERA-40_subset.nc') + '-b', 'tests/testdata/nc/ECMWF_ERA-40_subset.nc') assert ret.success, "process should return success" assert ret.stderr == '', "stderr should be empty" result = ret.stdout bboxList = parse_coordinates(result) assert bboxList == pytest.approx([-90.0, 0.0, 90.0, 357.5], abs=tolerance) + assert "4326" in result + @pytest.mark.skip(reason="file format not implemented yet") def test_netcdf_time(script_runner): result = script_runner.run('geoextent', - '-t', 'tests/testdata/nc/ECMWF_ERA-40_subset.nc') + '-t', 'tests/testdata/nc/ECMWF_ERA-40_subset.nc') assert result.success, "process should return success" assert result.stderr == '', "stderr should be empty" - assert "['2002-07-01','2002-07-31']" in result.stdout, "time value is printed to console" + assert "['2002-07-01','2002-07-31']" in result.stdout, "time value is printed to console" @pytest.mark.skip(reason="file format not implemented yet") def test_netcdf_time_invalid(script_runner): ret = script_runner.run('geoextent', - '-b', 'tests/testdata/nc/ECMWF_ERA-40_subset.nc') + '-b', 
'tests/testdata/nc/ECMWF_ERA-40_subset.nc') assert ret.success, "process should return success" assert ret.stderr is not None assert ret.stderr == 'invalid time format', "stderr should not be empty" @@ -151,13 +158,14 @@ def test_kml_bbox(script_runner): result = ret.stdout bboxList = parse_coordinates(result) assert bboxList == pytest.approx([7.594213, 51.942465, 7.618246, 51.957278], abs=tolerance) + assert "4326" in result def test_kml_time(script_runner): - ret = script_runner.run('geoextent', '-t', 'tests/testdata/kml/aasee.kml') + ret = script_runner.run('geoextent', '-t', 'tests/testdata/kml/TimeStamp_example.kml') assert ret.success, "process should return success" assert ret.stderr == '', "stderr should be empty" - assert "'tbox': None" in ret.stdout, "time value is printed to console" + assert "['2007-01-14', '2007-01-14']" in ret.stdout, "time value is printed to console" def test_kml_time_invalid(script_runner): @@ -170,46 +178,45 @@ def test_kml_time_invalid(script_runner): @pytest.mark.skipif(gdal.__version__.startswith("2"), reason="coordinate order mismatch for old GDAL versions") def test_geotiff_bbox(script_runner): ret = script_runner.run('geoextent', - '-b', 'tests/testdata/tif/wf_100m_klas.tif') + '-b', 'tests/testdata/tif/wf_100m_klas.tif') assert ret.success, "process should return success" assert ret.stderr == '', "stderr should be empty" result = ret.stdout - assert '4326' in result bboxList = parse_coordinates(result) assert bboxList == pytest.approx([5.915300, 50.310251, 9.468398, 52.530775], abs=tolerance) + assert "4326" in result def test_gpkg_bbox(script_runner): - ret = script_runner.run('geoextent','-b', 'tests/testdata/geopackage/nc.gpkg') + ret = script_runner.run('geoextent', '-b', 'tests/testdata/geopackage/nc.gpkg') result = ret.stdout assert ret.success, "process should return success" assert ret.stderr == '', "stderr should be empty" bboxList = parse_coordinates(result) assert bboxList == pytest.approx([-84.32383, 33.882102, -75.456585, 36.589757], abs=tolerance) + assert "4326" in result -def test_csv_bbox(script_runner, tmpdir): - ret = script_runner.run('geoextent', - '-b', 'tests/testdata/csv/cities_NL.csv') +def test_csv_bbox(script_runner): + ret = script_runner.run('geoextent', '-b', 'tests/testdata/csv/cities_NL.csv') assert ret.success, "process should return success" result = ret.stdout bboxList = parse_coordinates(result) assert bboxList == pytest.approx([4.3175, 51.434444, 6.574722, 53.217222], abs=tolerance) + assert "4326" in result -def test_csv_time(script_runner, tmpdir): - ret = script_runner.run('geoextent', - '-t', 'tests/testdata/csv/cities_NL.csv') + +def test_csv_time(script_runner): + ret = script_runner.run('geoextent', '-t', 'tests/testdata/csv/cities_NL.csv') assert ret.success, "process should return success" - #assert ret.stderr == '', "stderr should be empty" assert "['2017-08-01', '2019-09-30']" in ret.stdout, "time value is printed to console" -def test_csv_time_invalid(script_runner, tmpdir): - ret = script_runner.run('geoextent', - '-t', 'tests/testdata/csv/cities_NL_lat&long.csv') +def test_csv_time_invalid(script_runner): + ret = script_runner.run('geoextent', '-t', 'tests/testdata/csv/cities_NL_lat&long.csv') assert ret.success, "process should return success" assert ret.stderr is not None - assert "no TemporalExtent" in ret.stderr , "stderr should not be empty" + assert "no TemporalExtent" in ret.stderr, "stderr should not be empty" @pytest.mark.skipif("TRAVIS" in os.environ and os.environ["TRAVIS"] == "true", @@ 
-221,17 +228,18 @@ def test_gml_bbox(script_runner): result = ret.stdout bboxList = parse_coordinates(result) assert bboxList == pytest.approx([-17.542069, 32.39669, -6.959389, 39.301139]) + assert "4326" in result def test_gml_time(script_runner): ret = script_runner.run('geoextent', '-t', 'tests/testdata/gml/clc_1000_PT.gml') assert ret.success, "process should return success" assert ret.stderr == '', "stderr should be empty" - assert "['2005-12-31', '2013-11-30']" in ret.stdout, "time value is printed to console" + assert "['2005-12-31', '2013-11-30']" in ret.stdout, "time value is printed to console" def test_gml_only_one_time_feature_valid(script_runner): - ret = script_runner.run('geoextent','-t', 'tests/testdata/gml/mypolygon_px6_error_time_one_feature.gml') + ret = script_runner.run('geoextent', '-t', 'tests/testdata/gml/mypolygon_px6_error_time_one_feature.gml') assert ret.stdout assert "'tbox': ['2012-04-15', '2012-04-15']" in ret.stdout, "time value is printed to console" @@ -239,47 +247,53 @@ def test_gml_only_one_time_feature_valid(script_runner): def test_shp_bbox(script_runner): ret = script_runner.run('geoextent', '-b', 'tests/testdata/shapefile/Abgrabungen_Kreis_Kleve_Shape.shp') assert ret.success, "process should return success" - assert "'bbox': None" in ret.stdout + assert "'bbox'" not in ret.stdout @pytest.mark.skip(reason="multiple input files not implemented yet") def test_multiple_files(script_runner): ret = script_runner.run('python', 'geoextent', - '-b', 'tests/testdata/shapefile/Abgrabungen_Kreis_Kleve_Shape.shp', 'tests/testdata/geojson/ausgleichsflaechen_moers.geojson') + '-b', 'tests/testdata/shapefile/Abgrabungen_Kreis_Kleve_Shape.shp', + 'tests/testdata/geojson/ausgleichsflaechen_moers.geojson') assert ret.success, "process should return success" assert ret.stderr == '', "stderr should be empty" - assert "[7.6016807556152335, 51.94881477206191, 7.647256851196289, 51.974624029877454]" in ret.stdout, "bboxes and time values of all files inside folder, are printed to console" - assert "[6.574722, 51.434444, 4.3175, 53.217222]" in ret.stdout, "bboxes and time values of all files inside folder, are printed to console" - assert "[292063.81225905, 5618144.09259115, 302531.3161606, 5631223.82854667]" in ret.stdout, "bboxes and time values of all files inside folder, are printed to console" + assert "[7.6016807556152335, 51.94881477206191, 7.647256851196289, 51.974624029877454]" in ret.stdout,\ + "bboxes and time values of all files inside folder, are printed to console" + assert "[6.574722, 51.434444, 4.3175, 53.217222]" in ret.stdout, \ + "bboxes and time values of all files inside folder, are printed to console" + assert "[292063.81225905, 5618144.09259115, 302531.3161606, 5631223.82854667]" in ret.stdout, \ + "bboxes and time values of all files inside folder, are printed to console" def test_folder(script_runner): ret = script_runner.run('geoextent', - '-b','-t', 'tests/testdata/folders/folder_two_files') + '-b', '-t', 'tests/testdata/folders/folder_two_files') assert ret.success, "process should return success" assert ret.stderr == '', "stderr should be empty" result = ret.stdout bboxList = parse_coordinates(result) assert bboxList == pytest.approx([2.052333, 41.317038, 7.647256, 51.974624]) - assert "['2018-11-14', '2019-09-11']" in result,"merge time value of folder files, is printed to console" + assert "['2018-11-14', '2019-09-11']" in result, "merge time value of folder files, is printed to console" + assert "4326" in result def 
test_zipfile(script_runner): folder_name = "tests/testdata/folders/folder_one_file" with tempfile.NamedTemporaryFile(suffix=".zip") as tmp: create_zip(folder_name, tmp) - ret = script_runner.run('geoextent','-b','-t', tmp.name) + ret = script_runner.run('geoextent', '-b', '-t', tmp.name) assert ret.success, "process should return success" result = ret.stdout bboxList = parse_coordinates(result) assert bboxList == pytest.approx([7.601680, 51.948814, 7.647256, 51.974624]) assert "['2018-11-14', '2018-11-14']" in result + assert "4326" in result @pytest.mark.skip(reason="multiple input directories not implemented yet") def test_multiple_folders(script_runner): ret = script_runner.run('python', 'geoextent', - '-b', 'tests/testdata/shapefile', 'tests/testdata/geojson', 'tests/testdata/nc') + '-b', 'tests/testdata/shapefile', 'tests/testdata/geojson', 'tests/testdata/nc') assert ret.success, "process should return success" assert ret.stderr == '', "stderr should be empty" assert "full bbox" in ret.stdout, "joined bboxes of all files inside folder are printed to console" diff --git a/tests/testdata/kml/TimeStamp_example.kml b/tests/testdata/kml/TimeStamp_example.kml new file mode 100644 index 0000000..5e1a5c2 --- /dev/null +++ b/tests/testdata/kml/TimeStamp_example.kml @@ -0,0 +1,3293 @@ + + + + + + Points with TimeStamps + + + + + #check-hide-children + + + 2007-01-14T21:05:02Z + + #paddle-a + + -122.536226,37.86047,0 + + + + + 2007-01-14T21:05:20Z + + #hiker-icon + + -122.536422,37.860303,0 + + + + + 2007-01-14T21:05:43Z + + #hiker-icon + + -122.536688,37.860072,0 + + + + + 2007-01-14T21:06:04Z + + #hiker-icon + + -122.536923,37.859855,0 + + + + + 2007-01-14T21:06:24Z + + #hiker-icon + + -122.537116,37.85961000000001,0 + + + + + 2007-01-14T21:06:46Z + + #hiker-icon + + -122.537298,37.859336,0 + + + + + 2007-01-14T21:07:07Z + + #hiker-icon + + -122.537469,37.85907,0 + + + + + 2007-01-14T21:07:27Z + + #hiker-icon + + -122.537635,37.858822,0 + + + + + 2007-01-14T21:07:51Z + + #hiker-icon + + -122.537848,37.858526,0 + + + + + 2007-01-14T21:08:11Z + + #hiker-icon + + -122.538044,37.858288,0 + + + + + 2007-01-14T21:08:33Z + + #hiker-icon + + -122.538307,37.858064,0 + + + + + 2007-01-14T21:08:56Z + + #hiker-icon + + -122.538601,37.857837,0 + + + + + 2007-01-14T21:09:19Z + + #hiker-icon + + -122.53887,37.857604,0 + + + + + 2007-01-14T21:09:45Z + + #hiker-icon + + -122.539052,37.857262,0 + + + + + 2007-01-14T21:10:10Z + + #hiker-icon + + -122.539133,37.856913,0 + + + + + 2007-01-14T21:10:36Z + + #hiker-icon + + -122.539231,37.856547,0 + + + + + 2007-01-14T21:11:03Z + + #hiker-icon + + -122.539359,37.856184,0 + + + + + 2007-01-14T21:11:20Z + + #hiker-icon + + -122.539416,37.855954,0 + + + + + 2007-01-14T21:11:40Z + + #hiker-icon + + -122.539435,37.855658,0 + + + + + 2007-01-14T21:11:59Z + + #hiker-icon + + -122.539483,37.855391,0 + + + + + 2007-01-14T21:12:18Z + + #hiker-icon + + -122.539592,37.855156,0 + + + + + 2007-01-14T21:12:37Z + + #hiker-icon + + -122.539703,37.854897,0 + + + + + 2007-01-14T21:12:57Z + + #hiker-icon + + -122.539746,37.854626,0 + + + + + 2007-01-14T21:13:24Z + + #hiker-icon + + -122.539664,37.854252,0 + + + + + 2007-01-14T21:13:49Z + + #hiker-icon + + -122.539495,37.853932,0 + + + + + 2007-01-14T21:14:14Z + + #hiker-icon + + -122.539286,37.85362400000001,0 + + + + + 2007-01-14T21:14:37Z + + #hiker-icon + + -122.539098,37.853329,0 + + + + + 2007-01-14T21:14:57Z + + #hiker-icon + + -122.538985,37.853048,0 + + + + + 2007-01-14T21:15:20Z + + #hiker-icon + + -122.538914,37.852771,0 + + 
+ [... remainder of the "Points with TimeStamps" hiker-icon placemarks (timestamps 2007-01-14T21:15Z to 2007-01-14T21:47Z, coordinates near -122.55, 37.85) elided; the new TimeStamp_example.kml fixture totals 3293 lines ...]
#hiker-icon + + -122.556635,37.849954,0 + + + + + 2007-01-14T21:47:53Z + + #hiker-icon + + -122.556615,37.84995500000001,0 + + + + + 2007-01-14T21:48:07Z + + #hiker-icon + + -122.556443,37.849953,0 + + + + + 2007-01-14T21:48:21Z + + #hiker-icon + + -122.556403,37.850071,0 + + + + + 2007-01-14T21:48:41Z + + #hiker-icon + + -122.556335,37.850306,0 + + + + + 2007-01-14T21:49:02Z + + #hiker-icon + + -122.556174,37.85055,0 + + + + + 2007-01-14T21:49:23Z + + #hiker-icon + + -122.555988,37.850788,0 + + + + + 2007-01-14T21:49:46Z + + #hiker-icon + + -122.555773,37.851024,0 + + + + + 2007-01-14T21:50:05Z + + #hiker-icon + + -122.555595,37.851226,0 + + + + + 2007-01-14T21:50:29Z + + #hiker-icon + + -122.555328,37.85144,0 + + + + + 2007-01-14T21:50:43Z + + #hiker-icon + + -122.55515,37.851534,0 + + + + + 2007-01-14T21:51:05Z + + #hiker-icon + + -122.554864,37.851674,0 + + + + + 2007-01-14T21:51:25Z + + #hiker-icon + + -122.554738,37.851895,0 + + + + + 2007-01-14T21:51:48Z + + #hiker-icon + + -122.554689,37.85218700000001,0 + + + + + 2007-01-14T21:52:00Z + + #hiker-icon + + -122.554663,37.852335,0 + + + + + 2007-01-14T21:52:18Z + + #hiker-icon + + -122.554635,37.852549,0 + + + + + 2007-01-14T21:52:36Z + + #hiker-icon + + -122.554578,37.852755,0 + + + + + 2007-01-14T21:52:58Z + + #hiker-icon + + -122.554504,37.853015,0 + + + + + 2007-01-14T21:53:16Z + + #hiker-icon + + -122.554472,37.853225,0 + + + + + 2007-01-14T21:53:17Z + + #hiker-icon + + -122.554471,37.853236,0 + + + + + 2007-01-14T21:53:35Z + + #hiker-icon + + -122.55445,37.853432,0 + + + + + 2007-01-14T21:53:51Z + + #hiker-icon + + -122.554382,37.853578,0 + + + + + 2007-01-14T21:53:57Z + + #hiker-icon + + -122.554324,37.853611,0 + + + + + 2007-01-14T21:53:58Z + + #hiker-icon + + -122.554314,37.853617,0 + + + + + 2007-01-14T21:54:14Z + + #hiker-icon + + -122.554121,37.853707,0 + + + + + 2007-01-14T21:54:33Z + + #hiker-icon + + -122.553918,37.853836,0 + + + + + 2007-01-14T21:54:47Z + + #hiker-icon + + -122.553819,37.853924,0 + + + + + 2007-01-14T21:55:06Z + + #hiker-icon + + -122.553632,37.85411,0 + + + + + 2007-01-14T21:55:29Z + + #hiker-icon + + -122.55343,37.854376,0 + + + + + 2007-01-14T21:55:52Z + + #hiker-icon + + -122.553314,37.854678,0 + + + + + 2007-01-14T21:56:13Z + + #hiker-icon + + -122.553247,37.854957,0 + + + + + 2007-01-14T21:56:37Z + + #hiker-icon + + -122.553165,37.855282,0 + + + + + 2007-01-14T21:57:00Z + + #hiker-icon + + -122.553062,37.855567,0 + + + + + 2007-01-14T21:57:16Z + + #hiker-icon + + -122.55298,37.855748,0 + + + + + 2007-01-14T21:57:20Z + + #hiker-icon + + -122.552959,37.855792,0 + + + + + 2007-01-14T21:57:36Z + + #hiker-icon + + -122.552895,37.855962,0 + + + + + 2007-01-14T21:57:42Z + + #hiker-icon + + -122.552879,37.85602,0 + + + + + 2007-01-14T21:58:02Z + + #hiker-icon + + -122.552839,37.856229,0 + + + + + 2007-01-14T21:58:16Z + + #hiker-icon + + -122.552831,37.856301,0 + + + + + 2007-01-14T21:58:29Z + + #hiker-icon + + -122.552816,37.856365,0 + + + + + 2007-01-14T21:59:03Z + + #hiker-icon + + -122.552829,37.85638,0 + + + + + 2007-01-14T21:59:26Z + + #hiker-icon + + -122.552854,37.856395,0 + + + + + 2007-01-14T21:59:27Z + + #hiker-icon + + -122.552854,37.856395,0 + + + + + 2007-01-14T21:59:45Z + + #hiker-icon + + -122.552859,37.856395,0 + + + + + 2007-01-14T22:00:00Z + + #hiker-icon + + -122.552875,37.85639700000001,0 + + + + + 2007-01-14T22:00:45Z + + #hiker-icon + + -122.552872,37.856428,0 + + + + + 2007-01-14T22:00:56Z + + #hiker-icon + + -122.552874,37.856505,0 + + + + + 2007-01-14T22:01:04Z + + #hiker-icon + 
+ -122.552885,37.856595,0 + + + + + 2007-01-14T22:01:21Z + + #hiker-icon + + -122.552969,37.856779,0 + + + + + 2007-01-14T22:01:39Z + + #hiker-icon + + -122.553072,37.856989,0 + + + + + 2007-01-14T22:01:57Z + + #hiker-icon + + -122.553274,37.857177,0 + + + + + 2007-01-14T22:02:00Z + + #hiker-icon + + -122.553306,37.857201,0 + + + + + 2007-01-14T22:02:03Z + + #hiker-icon + + -122.553333,37.857225,0 + + + + + 2007-01-14T22:02:30Z + + #hiker-icon + + -122.553648,37.857481,0 + + + + + 2007-01-14T22:02:43Z + + #hiker-icon + + -122.553785,37.857599,0 + + + + + 2007-01-14T22:03:03Z + + #hiker-icon + + -122.553873,37.857817,0 + + + + + 2007-01-14T22:03:21Z + + #hiker-icon + + -122.553916,37.858027,0 + + + + + 2007-01-14T22:03:37Z + + #hiker-icon + + -122.55393,37.858198,0 + + + + + 2007-01-14T22:03:38Z + + #hiker-icon + + -122.55393,37.85821,0 + + + + + 2007-01-14T22:03:54Z + + #hiker-icon + + -122.55395,37.858388,0 + + + + + 2007-01-14T22:04:05Z + + #hiker-icon + + -122.553966,37.858512,0 + + + + + 2007-01-14T22:04:26Z + + #hiker-icon + + -122.553999,37.858743,0 + + + + + 2007-01-14T22:04:38Z + + #hiker-icon + + -122.554053,37.858876,0 + + + + + 2007-01-14T22:04:49Z + + #hiker-icon + + -122.554103,37.85900000000001,0 + + + + + 2007-01-14T22:05:11Z + + #hiker-icon + + -122.554156,37.859226,0 + + + + + 2007-01-14T22:05:24Z + + #hiker-icon + + -122.554124,37.859199,0 + + + + + 2007-01-14T22:05:47Z + + #hiker-icon + + -122.554149,37.859204,0 + + + + + 2007-01-14T22:05:57Z + + #hiker-icon + + -122.554159,37.859216,0 + + + + + 2007-01-14T22:06:26Z + + #hiker-icon + + -122.554168,37.859233,0 + + + + + 2007-01-14T22:06:40Z + + #hiker-icon + + -122.554143,37.859201,0 + + + + + 2007-01-14T22:06:53Z + + #hiker-icon + + -122.55409,37.859159,0 + + + + + 2007-01-14T22:06:57Z + + #hiker-icon + + -122.554045,37.859144,0 + + + + + 2007-01-14T22:07:17Z + + #hiker-icon + + -122.553865,37.859067,0 + + + + + 2007-01-14T22:07:28Z + + #hiker-icon + + -122.553739,37.859031,0 + + + + + 2007-01-14T22:07:29Z + + #hiker-icon + + -122.55373,37.859033,0 + + + + + 2007-01-14T22:07:33Z + + #hiker-icon + + -122.553671,37.859024,0 + + + + + 2007-01-14T22:07:50Z + + #hiker-icon + + -122.55342,37.858973,0 + + + + + 2007-01-14T22:08:07Z + + #hiker-icon + + -122.553163,37.858913,0 + + + + + 2007-01-14T22:08:20Z + + #hiker-icon + + -122.552953,37.858878,0 + + + + + 2007-01-14T22:08:33Z + + #hiker-icon + + -122.552763,37.858842,0 + + + + + 2007-01-14T22:08:49Z + + #hiker-icon + + -122.552562,37.858801,0 + + + + + 2007-01-14T22:09:03Z + + #hiker-icon + + -122.552393,37.858758,0 + + + + + 2007-01-14T22:09:24Z + + #hiker-icon + + -122.552332,37.858755,0 + + + + + 2007-01-14T22:09:33Z + + #hiker-icon + + -122.552329,37.858754,0 + + + + + 2007-01-14T22:09:47Z + + #hiker-icon + + -122.552151,37.858694,0 + + + + + 2007-01-14T22:10:00Z + + #hiker-icon + + -122.552005,37.858648,0 + + + + + 2007-01-14T22:10:13Z + + #hiker-icon + + -122.551841,37.858596,0 + + + + + 2007-01-14T22:10:25Z + + #hiker-icon + + -122.551712,37.85856,0 + + + + + 2007-01-14T22:10:26Z + + #hiker-icon + + -122.5517,37.858559,0 + + + + + 2007-01-14T22:10:43Z + + #hiker-icon + + -122.551566,37.858543,0 + + + + + 2007-01-14T22:10:54Z + + #hiker-icon + + -122.551447,37.858517,0 + + + + + 2007-01-14T22:11:08Z + + #hiker-icon + + -122.551297,37.858501,0 + + + + + 2007-01-14T22:11:23Z + + #hiker-icon + + -122.551116,37.85849,0 + + + + + 2007-01-14T22:11:29Z + + #hiker-icon + + -122.551026,37.85848,0 + + + + + 2007-01-14T22:11:43Z + + #hiker-icon + + -122.550852,37.858447,0 + + + + 
+ 2007-01-14T22:11:56Z + + #hiker-icon + + -122.550686,37.858419,0 + + + + + 2007-01-14T22:12:11Z + + #hiker-icon + + -122.550492,37.858377,0 + + + + + 2007-01-14T22:12:28Z + + #hiker-icon + + -122.550278,37.858329,0 + + + + + 2007-01-14T22:12:38Z + + #hiker-icon + + -122.550155,37.858294,0 + + + + + 2007-01-14T22:12:52Z + + #hiker-icon + + -122.549964,37.85824,0 + + + + + 2007-01-14T22:13:01Z + + #hiker-icon + + -122.549842,37.858204,0 + + + + + 2007-01-14T22:13:07Z + + #hiker-icon + + -122.549751,37.858177,0 + + + + + 2007-01-14T22:13:31Z + + #hiker-icon + + -122.549378,37.858076,0 + + + + + 2007-01-14T22:13:46Z + + #hiker-icon + + -122.549129,37.858037,0 + + + + + 2007-01-14T22:14:02Z + + #hiker-icon + + -122.548878,37.858007,0 + + + + + 2007-01-14T22:14:24Z + + #hiker-icon + + -122.548516,37.857961,0 + + + + + 2007-01-14T22:14:40Z + + #hiker-icon + + -122.548255,37.857918,0 + + + + + 2007-01-14T22:15:05Z + + #hiker-icon + + -122.547869,37.857801,0 + + + + + 2007-01-14T22:15:11Z + + #hiker-icon + + -122.547771,37.85777,0 + + + + + 2007-01-14T22:15:31Z + + #hiker-icon + + -122.547492,37.857641,0 + + + + + 2007-01-14T22:15:50Z + + #hiker-icon + + -122.547241,37.857503,0 + + + + + 2007-01-14T22:16:07Z + + #hiker-icon + + -122.547007,37.857382,0 + + + + + 2007-01-14T22:16:27Z + + #hiker-icon + + -122.546804,37.857246,0 + + + + + 2007-01-14T22:16:35Z + + #hiker-icon + + -122.546788,37.857248,0 + + + + + 2007-01-14T22:16:49Z + + #hiker-icon + + -122.546641,37.857136,0 + + + + + 2007-01-14T22:17:11Z + + #hiker-icon + + -122.546387,37.856931,0 + + + + + 2007-01-14T22:17:26Z + + #hiker-icon + + -122.546223,37.856791,0 + + + + + 2007-01-14T22:17:41Z + + #hiker-icon + + -122.546094,37.856643,0 + + + + + 2007-01-14T22:18:02Z + + #hiker-icon + + -122.5459,37.856408,0 + + + + + 2007-01-14T22:18:23Z + + #hiker-icon + + -122.545686,37.856179,0 + + + + + 2007-01-14T22:18:46Z + + #hiker-icon + + -122.545384,37.85598900000001,0 + + + + + 2007-01-14T22:19:06Z + + #hiker-icon + + -122.545123,37.855809,0 + + + + + 2007-01-14T22:19:25Z + + #hiker-icon + + -122.544885,37.855619,0 + + + + + 2007-01-14T22:19:48Z + + #hiker-icon + + -122.544598,37.855384,0 + + + + + 2007-01-14T22:20:11Z + + #hiker-icon + + -122.544283,37.855176,0 + + + + + 2007-01-14T22:20:14Z + + #hiker-icon + + -122.544247,37.855155,0 + + + + + 2007-01-14T22:20:32Z + + #hiker-icon + + -122.544053,37.855089,0 + + + + + 2007-01-14T22:20:48Z + + #hiker-icon + + -122.54376,37.855111,0 + + + + + 2007-01-14T22:20:54Z + + #hiker-icon + + -122.543669,37.855075,0 + + + + + 2007-01-14T22:21:07Z + + #hiker-icon + + -122.543496,37.854991,0 + + + + + 2007-01-14T22:21:22Z + + #hiker-icon + + -122.54326,37.854948,0 + + + + + 2007-01-14T22:21:36Z + + #hiker-icon + + -122.543053,37.85498,0 + + + + + 2007-01-14T22:21:49Z + + #hiker-icon + + -122.542877,37.855009,0 + + + + + 2007-01-14T22:22:02Z + + #hiker-icon + + -122.542706,37.855053,0 + + + + + 2007-01-14T22:22:16Z + + #hiker-icon + + -122.542502,37.855106,0 + + + + + 2007-01-14T22:22:29Z + + #hiker-icon + + -122.542326,37.855157,0 + + + + + 2007-01-14T22:22:33Z + + #hiker-icon + + -122.542271,37.855174,0 + + + + + 2007-01-14T22:22:51Z + + #hiker-icon + + -122.542032,37.855274,0 + + + + + 2007-01-14T22:23:03Z + + #hiker-icon + + -122.541884,37.855366,0 + + + + + 2007-01-14T22:23:14Z + + #hiker-icon + + -122.541792,37.855462,0 + + + + + 2007-01-14T22:23:19Z + + #hiker-icon + + -122.541749,37.855511,0 + + + + + 2007-01-14T22:23:35Z + + #hiker-icon + + -122.541591,37.855648,0 + + + + + 2007-01-14T22:23:49Z + + 
#hiker-icon + + -122.541406,37.855657,0 + + + + + 2007-01-14T22:24:00Z + + #hiker-icon + + -122.541259,37.855652,0 + + + + + 2007-01-14T22:24:16Z + + #hiker-icon + + -122.54105,37.855661,0 + + + + + 2007-01-14T22:24:32Z + + #hiker-icon + + -122.54082,37.855726,0 + + + + + 2007-01-14T22:24:49Z + + #hiker-icon + + -122.540684,37.85588,0 + + + + + 2007-01-14T22:24:57Z + + #hiker-icon + + -122.540675,37.855979,0 + + + + + 2007-01-14T22:25:13Z + + #hiker-icon + + -122.540681,37.856168,0 + + + + + 2007-01-14T22:25:31Z + + #hiker-icon + + -122.540751,37.856381,0 + + + + + 2007-01-14T22:25:48Z + + #hiker-icon + + -122.540897,37.856542,0 + + + + + 2007-01-14T22:26:02Z + + #hiker-icon + + -122.541028,37.85665,0 + + + + + 2007-01-14T22:26:17Z + + #hiker-icon + + -122.541184,37.85674700000001,0 + + + + + 2007-01-14T22:26:29Z + + #hiker-icon + + -122.541284,37.856854,0 + + + + + 2007-01-14T22:26:33Z + + #hiker-icon + + -122.541262,37.856892,0 + + + + + 2007-01-14T22:26:54Z + + #hiker-icon + + -122.54111,37.85694800000001,0 + + + + + 2007-01-14T22:27:14Z + + #hiker-icon + + -122.540917,37.85694,0 + + + + + 2007-01-14T22:27:33Z + + #hiker-icon + + -122.540655,37.856934,0 + + + + + 2007-01-14T22:27:53Z + + #hiker-icon + + -122.540629,37.856914,0 + + + + + 2007-01-14T22:28:05Z + + #hiker-icon + + -122.540558,37.85690000000001,0 + + + + + 2007-01-14T22:28:09Z + + #hiker-icon + + -122.540558,37.85690000000001,0 + + + + + 2007-01-14T22:28:19Z + + #hiker-icon + + -122.540401,37.856811,0 + + + + + 2007-01-14T22:28:38Z + + #hiker-icon + + -122.540101,37.85667,0 + + + + + 2007-01-14T22:28:57Z + + #hiker-icon + + -122.539866,37.856554,0 + + + + + 2007-01-14T22:29:15Z + + #hiker-icon + + -122.539748,37.856432,0 + + + + + 2007-01-14T22:29:36Z + + #hiker-icon + + -122.539409,37.85629100000001,0 + + + + + 2007-01-14T22:29:49Z + + #hiker-icon + + -122.539304,37.85630200000001,0 + + + + + 2007-01-14T22:29:56Z + + #hiker-icon + + -122.539255,37.856381,0 + + + + + 2007-01-14T22:30:14Z + + #hiker-icon + + -122.539186,37.856594,0 + + + + + 2007-01-14T22:30:30Z + + #hiker-icon + + -122.539135,37.85677,0 + + + + + 2007-01-14T22:30:53Z + + #hiker-icon + + -122.539079,37.857076,0 + + + + + 2007-01-14T22:31:14Z + + #hiker-icon + + -122.538995,37.857336,0 + + + + + 2007-01-14T22:31:34Z + + #hiker-icon + + -122.538852,37.857571,0 + + + + + 2007-01-14T22:31:49Z + + #hiker-icon + + -122.538694,37.857713,0 + + + + + 2007-01-14T22:31:55Z + + #hiker-icon + + -122.538667,37.857763,0 + + + + + 2007-01-14T22:32:10Z + + #hiker-icon + + -122.538574,37.857842,0 + + + + + 2007-01-14T22:32:19Z + + #hiker-icon + + -122.538469,37.857902,0 + + + + + 2007-01-14T22:32:23Z + + #hiker-icon + + -122.538441,37.857931,0 + + + + + 2007-01-14T22:32:43Z + + #hiker-icon + + -122.538206,37.858103,0 + + + + + 2007-01-14T22:33:03Z + + #hiker-icon + + -122.537985,37.858287,0 + + + + + 2007-01-14T22:33:23Z + + #hiker-icon + + -122.53781,37.858496,0 + + + + + 2007-01-14T22:33:43Z + + #hiker-icon + + -122.537671,37.858732,0 + + + + + 2007-01-14T22:34:02Z + + #hiker-icon + + -122.537542,37.858959,0 + + + + + 2007-01-14T22:34:24Z + + #hiker-icon + + -122.537381,37.859224,0 + + + + + 2007-01-14T22:34:48Z + + #hiker-icon + + -122.537179,37.859489,0 + + + + + 2007-01-14T22:35:09Z + + #hiker-icon + + -122.536991,37.859723,0 + + + + + 2007-01-14T22:35:31Z + + #hiker-icon + + -122.536767,37.859953,0 + + + + + 2007-01-14T22:35:52Z + + #hiker-icon + + -122.536537,37.860164,0 + + + + + 2007-01-14T22:36:12Z + + #hiker-icon + + -122.53632,37.860365,0 + + + + + 

From 303a11a9441e4274f53ae0ec11fcb73fc7144c30 Mon Sep 17 00:00:00 2001
From: sbastiangarzon
Date: Mon, 4 Jan 2021 17:22:11 +0100
Subject: [PATCH 2/6] Skip macOS test for *ml

---
 tests/test_api.py | 1 +
 tests/test_cli.py | 2 ++
 2 files changed, 3 insertions(+)

diff --git a/tests/test_api.py b/tests/test_api.py
index 259ee17..1dc3c2c 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -37,6 +37,7 @@ def test_kml_extract_bbox():
     assert result["crs"] == "4326"


+@pytest.mark.skipif(sys.platform == "darwin", reason="MacOS does not load the file properly")
 def test_kml_extract_tbox():
     result = geoextent.fromFile("tests/testdata/kml/TimeStamp_example.kml", bbox=True)
     assert "tbox" in result
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 223a55d..17a34c6 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -1,4 +1,5 @@
 import os # used to get the location of the testdata
+import sys
 import pytest
 import tempfile
 from help_functions_test import create_zip, parse_coordinates, tolerance
@@ -153,6 +154,7 @@ def test_netcdf_time_invalid(script_runner):
     assert ret.stderr == 'invalid time format', "stderr should not be empty"


+@pytest.mark.skipif(sys.platform == "darwin", reason="MacOS does not load the file properly")
 def test_kml_bbox(script_runner):
     ret = script_runner.run('geoextent', '-b', 'tests/testdata/kml/aasee.kml')
     result = ret.stdout

From c630208c80eaa30e6116b7b9d698300bee4f0e40 Mon Sep 17 00:00:00 2001
From: sbastiangarzon
Date: Mon, 4 Jan 2021 17:40:37 +0100
Subject: [PATCH 3/6] Correct test to skip- macOs

---
 tests/test_cli.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/test_cli.py b/tests/test_cli.py
index 17a34c6..e396dda 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -154,7 +154,6 @@ def test_netcdf_time_invalid(script_runner):
     assert ret.stderr == 'invalid time format', "stderr should not be empty"


-@pytest.mark.skipif(sys.platform == "darwin", reason="MacOS does not load the file properly")
 def test_kml_bbox(script_runner):
     ret = script_runner.run('geoextent', '-b', 'tests/testdata/kml/aasee.kml')
     result = ret.stdout
@@ -163,6 +162,7 @@ def test_kml_bbox(script_runner):
     assert "4326" in result


+@pytest.mark.skipif(sys.platform == "darwin", reason="MacOS does not load the file properly")
 def test_kml_time(script_runner):
     ret = script_runner.run('geoextent', '-t', 'tests/testdata/kml/TimeStamp_example.kml')
     assert ret.success, "process should return success"

From c78064125ed080bc244a742da9f571547573f765 Mon Sep 17 00:00:00 2001
From: sbastiangarzon
Date: Tue, 5 Jan 2021 16:02:28 +0100
Subject: [PATCH 4/6] Fixes #104 #111

---
 geoextent/lib/extent.py                     | 75 +++++++++++---------
 geoextent/lib/handleCSV.py                  | 28 +++++---
 geoextent/lib/handleRaster.py               | 25 +++----
 geoextent/lib/handleVector.py               | 26 +++----
 geoextent/lib/helpfunctions.py              | 44 ++++++++----
 tests/test_api_shapefile.py                 |  5 +-
 tests/test_cli.py                           |  9 ++-
 tests/testdata/shapefile/ifgi_denkpause.dbf | Bin 0 -> 212 bytes
 tests/testdata/shapefile/ifgi_denkpause.prj |  1 +
 tests/testdata/shapefile/ifgi_denkpause.qpj |  1 +
 tests/testdata/shapefile/ifgi_denkpause.shp | Bin 0 -> 484 bytes
 tests/testdata/shapefile/ifgi_denkpause.shx | Bin 0 -> 148 bytes
 12 files changed, 124 insertions(+), 90 deletions(-)
 create mode 100644 tests/testdata/shapefile/ifgi_denkpause.dbf
 create mode 100644
tests/testdata/shapefile/ifgi_denkpause.prj create mode 100644 tests/testdata/shapefile/ifgi_denkpause.qpj create mode 100644 tests/testdata/shapefile/ifgi_denkpause.shp create mode 100644 tests/testdata/shapefile/ifgi_denkpause.shx diff --git a/geoextent/lib/extent.py b/geoextent/lib/extent.py index 373ec2e..71a02b4 100644 --- a/geoextent/lib/extent.py +++ b/geoextent/lib/extent.py @@ -11,7 +11,8 @@ from . import helpfunctions as hf logger = logging.getLogger("geoextent") -handle_modules = {'CSV': handleCSV, "raster":handleRaster, "vector":handleVector} +handle_modules = {'CSV': handleCSV, "raster": handleRaster, "vector": handleVector} + def computeBboxInWGS84(module, path): ''' @@ -19,24 +20,29 @@ def computeBboxInWGS84(module, path): input "path": type string, path to file \n returns a bounding box, type list, length = 4 , type = float, schema = [min(longs), min(lats), max(longs), max(lats)], the boudning box has either its original crs or WGS84 (transformed). ''' - spatial_extent_org = module.getBoundingBox(path) - try: + logger.debug("computeBboxInWGS84: {}".format(path)) + spatial_extent_origin = module.getBoundingBox(path) - if spatial_extent_org['crs'] != str(hf.WGS84_EPSG_ID): - bbox_WGS84 = hf.transformingArrayIntoWGS84(spatial_extent_org['crs'], spatial_extent_org['bbox']) + try: + if spatial_extent_origin['crs'] == str(hf.WGS84_EPSG_ID): + spatial_extent = spatial_extent_origin else: - bbox_WGS84 = spatial_extent_org['bbox'] - except: - raise Exception("The bounding box could not be related to a CRS") - - spatial_extent = {'bbox': bbox_WGS84, 'crs': str(hf.WGS84_EPSG_ID)} + spatial_extent = {'bbox': hf.transformingArrayIntoWGS84(spatial_extent_origin['crs'], + spatial_extent_origin['bbox']), + 'crs': str(hf.WGS84_EPSG_ID)} + except Exception as e: + raise Exception("The bounding box could not be transformed to the target CRS epsg:{}".format(hf.WGS84_EPSG_ID)) return spatial_extent def fromDirectory(path, bbox=False, tbox=False): - ''' TODO: implement - ''' + """ Extracts geoextent from a directory/ZipFile + Keyword arguments: + path -- directory/ZipFile path + bbox -- True if bounding box is requested (default False) + tbox -- True if time box is requested (default False) + """ logger.info("Extracting bbox={} tbox={} from Directory {}".format(bbox, tbox, path)) @@ -57,14 +63,14 @@ def fromDirectory(path, bbox=False, tbox=False): path = extract_folder for filename in os.listdir(path): - logger.info("path {}, folder/zipfile {}".format(path,filename)) + logger.info("path {}, folder/zipfile {}".format(path, filename)) isZip = zipfile.is_zipfile(os.path.join(path, filename)) if isZip: logger.info("**Inspecting folder {}, is zip ? {}**".format(filename, str(isZip))) - metadata_directory[filename] = fromDirectory(os.path.join(path,filename),bbox,tbox) + metadata_directory[filename] = fromDirectory(os.path.join(path, filename), bbox, tbox) else: logger.info("Inspecting folder {}, is zip ? 
{}".format(filename, str(isZip))) - if os.path.isdir(os.path.join(path,filename)): + if os.path.isdir(os.path.join(path, filename)): metadata_directory[filename] = fromDirectory(os.path.join(path, filename), bbox, tbox) else: metadata_file = fromFile(os.path.join(path, filename), bbox, tbox) @@ -74,35 +80,36 @@ def fromDirectory(path, bbox=False, tbox=False): metadata['format'] = file_format if bbox: - bbox_ext = hf.bbox_merge(metadata_directory,path) + bbox_ext = hf.bbox_merge(metadata_directory, path) if bbox_ext is not None: - metadata['crs'] = "4326" - metadata['bbox'] = bbox_ext + if len(bbox_ext) != 0: + metadata['crs'] = bbox_ext['crs'] + metadata['bbox'] = bbox_ext['bbox'] else: - logger.warning("The {} {} has no identifiable bbox - Coordinate reference system (CRS) may be missing".format(file_format,path)) + logger.warning( + "The {} {} has no identifiable bbox - Coordinate reference system (CRS) may be missing".format( + file_format, path)) if tbox: - tbox_ext = hf.tbox_merge(metadata_directory,path) + tbox_ext = hf.tbox_merge(metadata_directory, path) if tbox_ext is not None: metadata['tbox'] = tbox_ext else: - logger.warning("The {} {} has no identifiable time extent".format(file_format,path)) + logger.warning("The {} {} has no identifiable time extent".format(file_format, path)) - #metadata['details'] = metadata_directory + # metadata['details'] = metadata_directory return metadata + def fromFile(filePath, bbox=True, tbox=True, num_sample=None): - ''' TODO: update these docs - - function is called when filePath is included in commandline (with tag 'b') - how this is done depends on the file format - the function calls the handler for each supported format \n - extracted data are bounding box, temporal extent and crs, a seperate thread is dedicated to each extraction process \n - input "filePath": type string, path to file from which the metadata shall be extracted \n - input "whatMetadata": type string, specifices which metadata should be extracted \n - returns None if the format is not supported, else returns the metadata of the file as a dict - (possible) keys of the dict: 'temporal_extent', 'bbox', 'vector_reps', 'crs' - ''' + """ Extracts geoextent from a file + Keyword arguments: + path -- filepath + bbox -- True if bounding box is requested (default False) + tbox -- True if time box is requested (default False) + num_sample -- sample size to determine time format (Only required for csv files) + """ logger.info("Extracting bbox={} tbox={} from file {}".format(bbox, tbox, filePath)) if bbox == False and tbox == False: @@ -122,7 +129,7 @@ def fromFile(filePath, bbox=True, tbox=True, num_sample=None): valid = handle_modules[i].checkFileSupported(filePath) if valid: usedModule = handle_modules[i] - logger.info("{} is being used to inspect {} file".format(usedModule.get_handler_name(),filePath)) + logger.info("{} is being used to inspect {} file".format(usedModule.get_handler_name(), filePath)) break # If file format is not supported @@ -171,7 +178,7 @@ def run(self): thread_bbox_except = thread("bbox") thread_temp_except = thread("tbox") - logger.debug("Starting 3 threads for extraction.") + logger.debug("Starting 2 threads for extraction.") thread_bbox_except.start() thread_temp_except.start() diff --git a/geoextent/lib/handleCSV.py b/geoextent/lib/handleCSV.py index d7d5492..8fc227d 100644 --- a/geoextent/lib/handleCSV.py +++ b/geoextent/lib/handleCSV.py @@ -7,19 +7,21 @@ logger = logging.getLogger("geoextent") -search = { "longitude" : ["(.)*longitude","(.)*long(.)*", 
"^lon","lon$","(.)*lng(.)*", "^x","x$"], - "latitude" : ["(.)*latitude(.)*", "^lat","lat$", "^y","y$"], - "time":["(.)*timestamp(.)*", "(.)*datetime(.)*", "(.)*time(.)*", "date$","^date"]} +search = {"longitude": ["(.)*longitude", "(.)*long(.)*", "^lon", "lon$", "(.)*lng(.)*", "^x", "x$"], + "latitude": ["(.)*latitude(.)*", "^lat", "lat$", "^y", "y$"], + "time": ["(.)*timestamp(.)*", "(.)*datetime(.)*", "(.)*time(.)*", "date$", "^date"]} + + def get_handler_name(): return "handleCSV" + def checkFileSupported(filepath): '''Checks whether it is valid CSV or not. \n input "path": type string, path to file which shall be extracted \n raise exception if not valid ''' - logger.info(filepath) try: file = gdal.OpenEx(filepath) driver = file.GetDriver().ShortName @@ -45,6 +47,7 @@ def getBoundingBox(filePath): input "filepath": type string, file path to csv file \n returns spatialExtent: type list, length = 4 , type = float, schema = [min(longs), min(lats), max(longs), max(lats)] ''' + with open(filePath) as csv_file: # To get delimiter either comma or simecolon daten = hf.getDelimiter(csv_file) @@ -53,10 +56,11 @@ def getBoundingBox(filePath): for x in daten: elements.append(x) - spatialLatExtent = hf.searchForParameters(elements, search['latitude'], exp_data= 'numeric') + spatialLatExtent = hf.searchForParameters(elements, search['latitude'], exp_data='numeric') minlat = None maxlat = None + if spatialLatExtent is None: pass else: @@ -72,13 +76,14 @@ def getBoundingBox(filePath): maxlon = (max(spatialLonExtent)) bbox = [minlon, minlat, maxlon, maxlat] + logger.debug("Extracted Bounding box (without projection): {}".format(bbox)) crs = getCRS(filePath) + logger.debug("Extracted CRS: {}".format(crs)) spatialExtent = {"bbox": bbox, "crs": crs} - logger.debug(bbox) if not bbox or not crs: raise Exception("Bounding box could not be extracted") - return spatialExtent + return spatialExtent def getTemporalExtent(filePath, num_sample): ''' extract time extent from csv string \n @@ -93,9 +98,8 @@ def getTemporalExtent(filePath, num_sample): elements = [] for x in daten: elements.append(x) - logger.info("Elements {}".format(elements)) - all_temporal_extent = hf.searchForParameters(elements, search['time'], exp_data = "time" ) + all_temporal_extent = hf.searchForParameters(elements, search['time'], exp_data="time") if all_temporal_extent is None: raise Exception('The csv file from ' + filePath + ' has no TemporalExtent') else: @@ -114,13 +118,15 @@ def getCRS(filePath): '''extracts coordinatesystem from csv File \n input "filepath": type string, file path to csv file \n returns the epsg code of the used coordinate reference system, type list, contains extracted coordinate system of content from csv file - ''' + ''' + with open(filePath) as csv_file: daten = csv.reader(csv_file.readlines()) elements = [] for x in daten: elements.append(x) - if hf.searchForParameters(elements,search['latitude']+search['longitude']) is None: + + if hf.searchForParameters(elements, search['latitude'] + search['longitude']) is None: if hf.searchForParameters(elements, ["crs","srsID"]) is None: raise Exception('The csv file from ' + filePath + ' has no CRS') if hf.searchForParameters(elements, ["crs","srsID"]) == "WGS84": diff --git a/geoextent/lib/handleRaster.py b/geoextent/lib/handleRaster.py index f3e7af8..c1b71fa 100644 --- a/geoextent/lib/handleRaster.py +++ b/geoextent/lib/handleRaster.py @@ -41,18 +41,20 @@ def getBoundingBox(filePath): returns bounding box of the file: type list, length = 4 , type = float, schema = 
[min(longs), min(lats), max(longs), max(lats)] ''' # Enable exceptions + + crs_output = hf.WGS84_EPSG_ID gdal.UseExceptions() geotiffContent = gdal.Open(filePath) # get the existing coordinate system - old_cs = osr.SpatialReference() - old_cs.ImportFromWkt(geotiffContent.GetProjectionRef()) + old_crs = osr.SpatialReference() + old_crs.ImportFromWkt(geotiffContent.GetProjectionRef()) # create the new coordinate system - new_cs = osr.SpatialReference() - new_cs.ImportFromEPSG(hf.WGS84_EPSG_ID) + new_crs = osr.SpatialReference() + new_crs.ImportFromEPSG(crs_output) # get the point to transform, pixel (0,0) in this case width = geotiffContent.RasterXSize @@ -64,7 +66,7 @@ def getBoundingBox(filePath): maxx = gt[0] + width * gt[1] + height * gt[2] maxy = gt[3] - transform = osr.CoordinateTransformation(old_cs, new_cs) + transform = osr.CoordinateTransformation(old_crs, new_crs) # get the coordinates in lat long latlongmin = transform.TransformPoint(minx, miny) latlongmax = transform.TransformPoint(maxx, maxy) @@ -72,23 +74,14 @@ def getBoundingBox(filePath): bbox = [latlongmin[0], latlongmin[1], latlongmax[0], latlongmax[1]] if int(osgeo.__version__[0]) >= 3: - if old_cs.GetAxisMappingStrategy() == 1: + if old_crs.GetAxisMappingStrategy() == 1: bbox = [latlongmin[1], latlongmin[0], latlongmax[1], latlongmax[0]] - spatialExtent = {"bbox": bbox, "crs": str(hf.WGS84_EPSG_ID)} + spatialExtent = {"bbox": bbox, "crs": str(crs_output)} return spatialExtent -def getCRS(filePath): - ''' gets the coordinate reference systems from the geotiff file \n - input "filepath": type string, file path to geotiff file \n - return epsg code of the used coordiante reference system: type int - ''' - - return "4326" - - def getTemporalExtent(filePath): ''' extracts temporal extent of the geotiff \n input "filepath": type string, file path to geotiff file \n diff --git a/geoextent/lib/handleVector.py b/geoextent/lib/handleVector.py index 8963909..59a2914 100644 --- a/geoextent/lib/handleVector.py +++ b/geoextent/lib/handleVector.py @@ -47,15 +47,18 @@ def getTemporalExtent(filepath): layer_count = datasource.GetLayerCount() logger.debug("{} contains {} layers".format(filepath, layer_count)) datetime_list = [] + for layer in datasource: logger.debug("{} : Extracting temporal extent from layer {} ".format(filepath, layer)) layerDefinition = layer.GetLayerDefn() field_names = [] - logger.debug(layerDefinition.GetFieldCount()) + for i in range(layerDefinition.GetFieldCount()): field_names.append(layerDefinition.GetFieldDefn(i).GetName()) + logger.debug("Found {} fields : {}".format(layerDefinition.GetFieldCount(), str(field_names))) + match_list = [] for x in search["time"]: term = re.compile(x, re.IGNORECASE) @@ -63,6 +66,7 @@ def getTemporalExtent(filepath): match = term.search(j) if match is not None: match_list.append(j) + logger.debug("Features name match: {}".format(match_list)) if len(match_list) == 0: @@ -72,29 +76,25 @@ def getTemporalExtent(filepath): datetime_list = [] for time_feature in match_list: time_list = [] - logger.debug("Time feature: {}".format(time_feature)) for feat in layer: time = feat.GetField(time_feature) - logger.debug("time {}".format(time)) if time is not None: time_list.append(time) layer.ResetReading() if len(time_list) != 0: - logger.debug("Time_list: {}".format(time_list)) parsed_time = hf.date_parser(time_list) if parsed_time is not None: datetime_list.extend(parsed_time) else: - logger.debug('File:{} /Layer: {}: : Matched temporal extent "{}"' - 'field do not have recognizable time 
format'.format(filepath, layer, time_feature)) + logger.debug('File {} / Layer {} \n' + ' {} feature do not have recognizable time format'.format(filepath, layer, time_feature)) pass else: - logger.debug("File:{} / Layer: {}: No values found in {} fields." - .format(filepath, layer, time_feature)) + logger.debug('File {} / Layer {} \n' + ' No values found in {} field'.format(filepath, layer, time_feature)) pass - logger.debug(match_list) if len(datetime_list) == 0: logger.debug("File {} do not have recognizable temporal extent".format(filepath)) return None @@ -111,6 +111,7 @@ def getBoundingBox(filepath): """ datasource = ogr.Open(filepath) geo_dict = {} + crs_output = hf.WGS84_EPSG_ID for layer in datasource: layer_name = layer.GetDescription() @@ -135,10 +136,9 @@ def getBoundingBox(filepath): bbox_merge = hf.bbox_merge(geo_dict, filepath) if bbox_merge is not None: - crs = str(hf.WGS84_EPSG_ID) + if len(bbox_merge) != 0: + spatialExtent = bbox_merge else: - crs = None - - spatialExtent = {"bbox": bbox_merge, "crs": crs} + spatialExtent = None return spatialExtent diff --git a/geoextent/lib/helpfunctions.py b/geoextent/lib/helpfunctions.py index 77b09d3..f9974df 100644 --- a/geoextent/lib/helpfunctions.py +++ b/geoextent/lib/helpfunctions.py @@ -29,8 +29,12 @@ def getAllRowElements(rowname, elements, exp_data=None): indexOf = idx values = [] for x in elements: - if x[indexOf] != rowname: - values.append(x[indexOf].replace(" ", "")) + try: + if x[indexOf] != rowname: + values.append(x[indexOf].replace(" ", "")) + except IndexError as e: + logger.info("Row skipped,file might be corrupted. Error {}".format(e)) + pass if exp_data == 'time': if get_time_format(values, 30) is not None: @@ -38,15 +42,26 @@ def getAllRowElements(rowname, elements, exp_data=None): elif exp_data == 'numeric': try: - values_num = list(map(float, values)) - return values_num - except: + values_num = list(map(float_convert, values)) + values_num_none = [i for i in values_num if i] + if len(values_num_none) == 0: + return None + else: + return values_num_none + except Exception as e: + logger.debug(e) return None - else: return values +def float_convert(val): + try: + return float(val) + except ValueError: + pass + + def searchForParameters(elements, paramArray, exp_data=None): ''' Function purpose: return all attributes of a elements in the first row of a file \n @@ -54,7 +69,6 @@ def searchForParameters(elements, paramArray, exp_data=None): Input: paramArray, elements \n Output: getAllRowElements(x,elements) ''' - matching_elements = [] for x in paramArray: for row in elements[0]: @@ -216,7 +230,7 @@ def extract_zip(zippedFile): def bbox_merge(metadata, origin): logger.debug("medatada {}".format(metadata)) boxes_extent = [] - metadata_merge = [] + metadata_merge = {} num_files = len(metadata.items()) for x, y in metadata.items(): if isinstance(y, dict): @@ -233,9 +247,9 @@ def bbox_merge(metadata, origin): elif len(boxes_extent) > 0: multipolygon = ogr.Geometry(ogr.wkbMultiPolygon) - des_srs = ogr.osr.SpatialReference() - des_srs.ImportFromEPSG(WGS84_EPSG_ID) - multipolygon.AssignSpatialReference(des_srs) + des_crs = ogr.osr.SpatialReference() + des_crs.ImportFromEPSG(WGS84_EPSG_ID) + multipolygon.AssignSpatialReference(des_crs) for bbox in boxes_extent: @@ -250,7 +264,7 @@ def bbox_merge(metadata, origin): if bbox[1] != str(WGS84_EPSG_ID): source = osr.SpatialReference() source.ImportFromEPSG(int(bbox[1])) - transform = osr.CoordinateTransformation(source, des_srs) + transform = 
osr.CoordinateTransformation(source, des_crs) box.Transform(transform) polygon = ogr.Geometry(ogr.wkbPolygon) @@ -258,7 +272,8 @@ def bbox_merge(metadata, origin): multipolygon.AddGeometry(polygon) except Exception as e: - logger.debug("Error extracting geographic extent of {}. CRS {} may be invalid. Error: {}".format(x,bbox[1], e)) + logger.debug( + "Error extracting geographic extent of {}. CRS {} may be invalid. Error: {}".format(x, bbox[1], e)) continue num_geo_files = multipolygon.GetGeometryCount() / 4 @@ -266,7 +281,8 @@ def bbox_merge(metadata, origin): logger.debug('{} contains {} geometries out of {} with identifiable geographic extent'.format(origin, int( num_geo_files), num_files)) env = multipolygon.GetEnvelope() - metadata_merge = [env[0], env[2], env[1], env[3]] + metadata_merge['bbox'] = [env[0], env[2], env[1], env[3]] + metadata_merge['crs'] = str(WGS84_EPSG_ID) else: logger.debug(" {} does not have geometries with identifiable geographical extent (CRS+bbox)".format(origin)) metadata_merge = None diff --git a/tests/test_api_shapefile.py b/tests/test_api_shapefile.py index 509ba51..ee57077 100644 --- a/tests/test_api_shapefile.py +++ b/tests/test_api_shapefile.py @@ -25,5 +25,8 @@ def test_shapefile_extract_bbox_with_crs(): def test_shapefile_extract_time(): - result = geoextent.fromFile('tests/testdata/shapefile/Abgrabungen_Kreis_Kleve_Shape.shp', bbox=False, tbox=True) + result = geoextent.fromFile('tests/testdata/shapefile/ifgi_denkpause.shp', bbox=False, tbox=True) assert "bbox" not in result + assert "crs" not in result + assert "tbox" in result + assert result['tbox'] == ['2021-01-01', '2021-01-01'] diff --git a/tests/test_cli.py b/tests/test_cli.py index e396dda..3156285 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -246,12 +246,19 @@ def test_gml_only_one_time_feature_valid(script_runner): assert "'tbox': ['2012-04-15', '2012-04-15']" in ret.stdout, "time value is printed to console" -def test_shp_bbox(script_runner): +def test_shp_bbox_no_crs(script_runner): ret = script_runner.run('geoextent', '-b', 'tests/testdata/shapefile/Abgrabungen_Kreis_Kleve_Shape.shp') assert ret.success, "process should return success" assert "'bbox'" not in ret.stdout +def test_shp_tbox(script_runner): + ret = script_runner.run('geoextent', '-t', 'tests/testdata/shapefile/ifgi_denkpause.shp') + assert ret.success, "process should return success" + assert "'tbox'" in ret.stdout + assert "['2021-01-01', '2021-01-01']" in ret.stdout + + @pytest.mark.skip(reason="multiple input files not implemented yet") def test_multiple_files(script_runner): ret = script_runner.run('python', 'geoextent', diff --git a/tests/testdata/shapefile/ifgi_denkpause.dbf b/tests/testdata/shapefile/ifgi_denkpause.dbf new file mode 100644 index 0000000000000000000000000000000000000000..66f5f8c49c6264b826681fa269d65680060afc84 GIT binary patch literal 212 zcmZRsWMpAuU|>jO5C)Q%ATtFn<_BVN!MPAdNoHu`542=wo b3=Mz~E}~$BDP)W(WP&MViYa6U6OsY|?w1%x literal 0 HcmV?d00001 diff --git a/tests/testdata/shapefile/ifgi_denkpause.prj b/tests/testdata/shapefile/ifgi_denkpause.prj new file mode 100644 index 0000000..a30c00a --- /dev/null +++ b/tests/testdata/shapefile/ifgi_denkpause.prj @@ -0,0 +1 @@ +GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]] \ No newline at end of file diff --git a/tests/testdata/shapefile/ifgi_denkpause.qpj b/tests/testdata/shapefile/ifgi_denkpause.qpj new file mode 100644 index 0000000..5fbc831 --- 
/dev/null +++ b/tests/testdata/shapefile/ifgi_denkpause.qpj @@ -0,0 +1 @@ +GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]] diff --git a/tests/testdata/shapefile/ifgi_denkpause.shp b/tests/testdata/shapefile/ifgi_denkpause.shp new file mode 100644 index 0000000000000000000000000000000000000000..ae9927c3264e7a93263f66c8cd4b4dfc84da1384 GIT binary patch literal 484 zcmZQzQ0HR64nDnLW?CmN5pvkcsgL$ zjv~qk%Y1a z{vN1b?&FDfj6nV9e2{*0F`y6&O#d9wm(%-!`WKdM(_secN9Tj|ql*EBSYi4AN(f|Me literal 0 HcmV?d00001 diff --git a/tests/testdata/shapefile/ifgi_denkpause.shx b/tests/testdata/shapefile/ifgi_denkpause.shx new file mode 100644 index 0000000000000000000000000000000000000000..cd5bf18fe56f1be3d2145a52f6391addacf1876a GIT binary patch literal 148 zcmZQzQ0HR64!mA4Gca%f Date: Tue, 5 Jan 2021 17:54:03 +0100 Subject: [PATCH 5/6] Fixes #105 and adds source references --- tests/test_api.py | 7 +++---- tests/test_cli.py | 8 ++++++++ tests/testdata/data-sources.md | 3 ++- .../geopackage/wandelroute_maastricht.gpkg | Bin 0 -> 98304 bytes 4 files changed, 13 insertions(+), 5 deletions(-) create mode 100644 tests/testdata/geopackage/wandelroute_maastricht.gpkg diff --git a/tests/test_api.py b/tests/test_api.py index 1dc3c2c..39b0e38 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -167,10 +167,9 @@ def test_netcdf_extract_bbox_time(): [-90.0, 0.0, 90.0, 357.5], ['2002-07-01', '2002-07-31']] -def test_gpkg_extract_bboxs(): - result = geoextent.fromFile("tests/testdata/geopackage/nc.gpkg", bbox=True) - assert result['bbox'] == pytest.approx([-84.323835, 33.882102, -75.456585, 36.589757], abs=tolerance) - assert result["crs"] == "4326" +def test_gpkg_extract_tbox(): + result = geoextent.fromFile("tests/testdata/geopackage/wandelroute_maastricht.gpkg", tbox=True) + assert result['tbox'] == ['2021-01-05', '2021-01-05'] @pytest.mark.skipif("TRAVIS" in os.environ and os.environ["TRAVIS"] == "true", diff --git a/tests/test_cli.py b/tests/test_cli.py index 3156285..2cc7bea 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -199,6 +199,14 @@ def test_gpkg_bbox(script_runner): assert "4326" in result +def test_gpkg_tbox(script_runner): + ret = script_runner.run('geoextent', '-t', 'tests/testdata/geopackage/wandelroute_maastricht.gpkg') + result = ret.stdout + assert ret.success, "process should return success" + assert ret.stderr == '', "stderr should be empty" + assert "['2021-01-05', '2021-01-05']" in result + + def test_csv_bbox(script_runner): ret = script_runner.run('geoextent', '-b', 'tests/testdata/csv/cities_NL.csv') assert ret.success, "process should return success" diff --git a/tests/testdata/data-sources.md b/tests/testdata/data-sources.md index f14e005..0d4f88b 100644 --- a/tests/testdata/data-sources.md +++ b/tests/testdata/data-sources.md @@ -19,7 +19,8 @@ | [Carto BCN / martgnz] | Barcelona districts | geojson | CC-BY 4.0 | | [Google] | abstractviews_timeprimitive_example | kml | CC-BY 4.0 | | [Tkrajina] | gpx1.1_with_all_fields.gpx | gpx | Apache License 2.0 | - +| [geoextent] | ifgi_to_denkpause.sph | shp | PDDL | +| [geoextent] | wandelroute_maastricht.gpkg | gpkg | PDDL | [sf]: https://github.com/r-spatial/sf [geoextent]: https://github.com/o2r-project/geoextent diff --git a/tests/testdata/geopackage/wandelroute_maastricht.gpkg b/tests/testdata/geopackage/wandelroute_maastricht.gpkg new 
file mode 100644 index 0000000000000000000000000000000000000000..df15449082688f62d5869dd298b0463a1d13bc62 GIT binary patch literal 98304 zcmeI5dvGI3dB8RH+VWf8gFPMW?QO3PBa4++mTa%r-d&Jv&w7Pz$t%fcY;!dwOJjMm z(nvQmUfY`s!S>z}l1f2w;dq3CPU^YQ74aYMNAYrEr2KWTuFI@c?5_&u&)yIyId+CJR8 z-1^1VtIl^cpKMrlUOaNH?#I-V#tZR#g}~-mr}OwsPt%Q~lECG1tdJ{R%`p;}XBnt8 zfm_qPRW2(nGaN6nf@GB$P0*1PO;U;I_&A*)UTb`{G`wVlz7$W;BpOT7i4=*)?ALh- zNDw+QdYL5Rvv9$9G)9v2Bt4oUiAXd_cSlCziIkss)qIg6Po(r>o+Vye9=u+1jVtEU z5-XBSkryQ)&GFJO(MqWlk7Uw3UzErqOY*EJ5@|Wjlc3i}W-rr_)9x5Od&VSxfdqZ$ z2^|~Te5=cO{0d+^wG+nc#bSZUv)9->vr@`JHms0t3elEA8R5+In9RSpL@F`|1ghq7 zKR^vxsD`uGjME@}f%IxHFLbCd8)|^LPm8SR)oi*9^ETUKC1X{su?Bowqis%iPY?Cz zkkS^cc4j?q*9|W+GD#C_Ea_GfVfb``B`NxCDH4l=o0**S8*&B17X2F+`Y5xMIu8H>-z1uADrja;uDIU0{8Q_!WvQlxa1 zk)#5dPDH07i8(Ss&lwr`4Nc7FH(a&^mlX<(ftK96MaN^-6nrE>UxMx=HcBT|+|V6K zEH8-$)IQ%^+8f=;9?HS-S$0i)C=VkklU|XE$~j}LCMIYN>Daoj#pxaxpdL~4WQ}J; z*4B^9VXeEGgDhdhVKxaPG`}c_441XnZh_;OHCye-q3ib0^jbAp2(`mnx=#9LMw zmU>pwhT_SiQ>BL0^4e=xETttboo57gi4oUD(ya;l## zDjJ)UTVc7`7VJTWnuf+IE0aqp8m-TmZ9k0nG6N+erZ&3*S7(e?G82nlnSp`6(uz&; z(8gtX-dynp;hLJZ)tWNQ!EGtp+~|IAa93L@SBhp4OsJ|&v_zZso|M}AST!_n+}+@G z2LjZ_Ld7tPIRR=*?eLTg7y}yZ25F2o)XOd%&k~!KRsl=xh#wt z+!DtMYJ_UN{wynI1g<1;a5t=s*m}{uUVlC?C{cdcQ)7PN+-pEeBD*M6yL`1+;t47N$0TW!`;%GkT*s~Cr6 z98hfE5>{t76^n#AdQ`a?+o4A__KE(YkW2I2br_Cx%P9A<#wchwMIj4ARn_4;%Pw&| zrzBx9N6qe__?`E{!PO~G^6T55b94mTpX+!WPVj>SkN^@u0!RP}AOR$R1dsp{Kmvz_ zz}p?pi;iPWCkr#MEDaMuHcMdDCnuy!%Up&e6UkUSMz82`H&@C1y&k|-;% ze1;`3+mzVC!f*AD^St92b@FOOOC~MAlRFNc$7xD_TTZAJ@^J&Mi zx*P2qCuYZ!Ki!Q=a0;!F2@tm)ZBtcPTAx44EFZ*dLDeRO3c~5 z&{;V~e(penwsC=F`Bg5n?D6+j5kEK78pc$m#Saod0!RP}AOR$R1dsp{Kmter2_OL^@ahmKQmug#f%c0| z_|z5B@RuM0EHCt=IiXY(B<-(eL|a6tHx%sYh2McxaA0_#Z@52rCNvnF54#)Q^U$fk zng~aQ|NDC3?Y|DYyI;DI{?$w2=(Q#J)A@rIC>Q1Wu>*ebg9MNO5a3W4siZ|_$wAmfCP{L5il2* z`~Mdmt``lG_!S8t0VIF~kN^@u0!RP}AOR$R1dsp{IGhBIQS|}J_V~XOpZ_1u>cm_l z0VIF~kN^@u0!RP}AOR$R1dsp{*nz+<&;P&eaD97+t1uV|AOR$R1dsp{Kmter2_OL^ zfCP{L61W2i_$g=L#HqTbCfmb)eecgH*kdy~9fvJk_4oh1;BdWg2NnghganWP5qc z;_@t0NJG$??yYiJX_;9mWz!OCl^IRYkrYi*iRkz^ogiLoe6=*ZWQ4vHPtas$dQ84J zeu-!b6-^`_v!~-Fa0Q_wqnAk{J_`wqM`I*OPtv0)l88i;ba!MVo=EwLSIs9W@=5`T`01&J#K|wmH`6RInbc2`k5otRPvj?G3A%Mj2}~mZTG@17cLuyp0&&>T({x z0@itICydvN#R8LOud(_4+NLdqGD6KVi%X;;b3mZ`w@X77s##^P*(wdv7f7#WlNUNv zm<=_++^0oW^fGsrJytSC_KEK;?Tzka59Q$aEW0K?l;DU@= zbGmzas7HsCKFJzyJ#W{We`I8mCe~Qett7(m=>khq^xIM-76&&oIq5eP750mvqXid|;8+_FS$(aNZx_RiJgcDk^{twH--V?^#cOUB|ea)HWOQX|)^M~=p0$rN<$ zu@os?WhALUrW4VrNMepm&~ruxenT7e`3;vX!DWR4W1uCEI??f%H3c6@(3hZFkB!nv z6*mkE63a`X0kzM!bzh6qJupB$q9$pr7e-`l{kR;~x@&F962{V)Yt&d~zbJ?dm$g?; zf#aDq8(cYb-5#1=t0oJfc3CsZoKz5R8B0qwDQQFTmupa1bySJjqZhkn%gOmx3^WfKH6?LoKE-PAoZ@3mG-K?gPF+`^D6~jRI1+^*krG3 ztH{ok?N?K-*09ToNb4oGn!rxBah;^1u{pUBm0MlG9%QI#Xlt@Exs;;O`i$B3!)=qy zK*@-y$*sWE8KafV#G+Sb;C`vn2uwbL-wZr`zkL9$BrF%~US^ zq~2a8J<4ckyA%j!GHFI`N@f{9mxWHBTjE$jjZm%EpJm02z?CEpCKbwns~6qt_2<)~ z#AIN0kc06JW;3biRAq!Cx?ndKfPZ5XPr z{ZiHN^-GVyJhn-m%Wl?F%GkT*s~Cr698hfE5>{t76^n#AdQ`cW+M!1^_KE(YkW2I2 zbr_Cx%P9A<#wchwMIj4ARn_4;%Pw&|rzBx9N2}fW+FM2(Ez}i9+tW?IbpCl`{Kzlr zpQ@W^Nw&>WSLC18CtH3FB9GTF`@e-EEhE$y+zSiT#<)^9f+Vo=5&|>~fy*rSq&cBf z6eML>VI^UmDWwHyW;KerO*QDL@hVR@Uxc`D%$4p!`0{c2z$|GjP-~KJBRg0qo%P z@cX|z^w>S_u%LiuRm!5Ie>gZW9~{VZSd@HytM%m%wzRj%y?%zNE$G`mmltlN##~$bU<_Ooj<$SV9{~reM|pU zI^_=Q{x9Y;U;ZiR(0#KGtCU4Y|8QtZ-!Gi~>bF*+Vcj?D zuu55U^vU(nzgTb4@zn4ObNyZJuR=w ze9|4({qT2MSCi2F>b_ZrRm!3xINTSU4=qv_9iRIAS1uiaI@f(F^})xNKu6g(b@5gy zi;mvmP~Ut|p8r#>7aj16A0&VTkN^@u0!RP}AOR$R1dsp{KmthMuoE~&)dwispX+q0 z^Z&;juE!300Wtqb00|%gB!C2v01`j~NB{{S0VIF~4i$l`RAb;o&qJqdlXd+cnw0=}4nKtL?9nP8|laDU~0wUz5u1$g09Pm#~B 
z%kTfET>s>NU;H2eB!C2v01`j~NB{{S0VIF~kN^@u0*8gbnFd%qXs&tfK>7cF*Jm8A zU%Os${oM5v*AHDUxW4Cl-u0a8Tdu!%ea-b1*Wb9l00Q_y0!RP}AOR$R1dsp{Kmter z2_OL^fCOGO0`;Bsj^=uoeCUu5?ed{bKD5e*7WvRDADZNYQ$94xhX(m@L_XBZhdTK{ z!9n@^|JEJ{{Ne`*AOR$R1dsp{Kmter2_OL@fp<^CTf8270|deQ7L^Yb-u{sX+#5zF zKGz3sKnt%6Z~y8!aQ{3q@!1c9`-m1^AAZ*Nad1zJOg!^LaQa)fj)b2b-voDKWa87; zz^!TW4dG{7-v#d7BNIzIs zcvE=$A07tx4hXvk-0QXQ=J57=S#Y_LiI05>+%sA@q`e03uUmWF$izp!4DN5W@Ye7) zJqa#0GVy_*f_q5|Zwqhx*T7vLnfTCM;9jeRw}-dKFN1sQ$izo!aN}BdM|eB?4RGHa znRp@v?g1^_72e+XJh(4FS$n_*wD8XG_SZfL4p@HXDR56~;pia&Bya!%^8CNtn;jq> zyc7u_0VIF~kN^@u0!RP}AOR$R1dsp{xN`}#w@o|RsW&>>j<#l6KGQ-qUvhq;;opyJ z*1eN@W5>_hztr|8cW&Nay_{{%zR`L7!9dfE9&M{u_3aC`|FU58S2SpwG56af^vzE7 ztzT|JVBBv zCMwc}lC-YuLSK>b`G~rethW8;r9^y+cy5+$kG6FzmxXO=Nq0F*{z`__jbgX{uuCEP z=pXEjY~QUS+xNR68;hqDv@&)0{yXG*u~OmZ%Xg$2Xm+Jj#%%T#*J9-&?^x6=i-2-N~MA9#b>wodQO0z$XS0zy^`xMbWr zdWY?1EiLU%XlN!wpkrfON}!UDB1u5AB&v(b?$ zr3IFkY7}$p_YBGg={FsEwZuK$d@;-NMR@?A$6>qp=|prYl9(eC^qgPSFAMlK{KV;= zouxJgYsZh3*EiPY|Gc|n(1s>O8Yi?PdeRmbJWK9+OjJYSGnv>^Q%Z5+hc~6 zi#*+mSCz6{BEIO`IDMbfJvG&|F;P>NZR<~dN-H# zlLE)D!7;riA31(q`M&PEc%spLG6xI@LIem?Z_-4R|yUr`r7(QKKedHp^7gt%K+iUB`z1po)pu0LTwcBQv z*tE1FuuP`7!b^-T6SnE$QPa^GX_KklZZ3=YY}JAEV6!pn HP2&FpSO#|u literal 0 HcmV?d00001 From b8802652b71d1c4c140c7a033ffd37e1fb1c80e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20N=C3=BCst?= Date: Tue, 5 Jan 2021 18:43:54 +0100 Subject: [PATCH 6/6] Update data-sources.md --- tests/testdata/data-sources.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/testdata/data-sources.md b/tests/testdata/data-sources.md index 0d4f88b..fd3c81a 100644 --- a/tests/testdata/data-sources.md +++ b/tests/testdata/data-sources.md @@ -19,7 +19,7 @@ | [Carto BCN / martgnz] | Barcelona districts | geojson | CC-BY 4.0 | | [Google] | abstractviews_timeprimitive_example | kml | CC-BY 4.0 | | [Tkrajina] | gpx1.1_with_all_fields.gpx | gpx | Apache License 2.0 | -| [geoextent] | ifgi_to_denkpause.sph | shp | PDDL | +| [geoextent] | ifgi_to_denkpause.shp | shp | PDDL | | [geoextent] | wandelroute_maastricht.gpkg | gpkg | PDDL | [sf]: https://github.com/r-spatial/sf @@ -31,4 +31,4 @@ [govdata]: https://www.govdata.de/web/guest/daten/-/details/abgrabungen [Carto BCN / martgnz]: https://github.com/martgnz/bcn-geodata/tree/master/districtes [Google]: https://developers.google.com/kml/documentation/time#gps -[Tkrajina]: https://github.com/tkrajina/gpxpy/blob/dev/test_files/gpx1.1_with_all_fields.gpx \ No newline at end of file +[Tkrajina]: https://github.com/tkrajina/gpxpy/blob/dev/test_files/gpx1.1_with_all_fields.gpx
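
After this series, fromFile reports the CRS together with the bounding box (already transformed to WGS84) instead of extracting the CRS in a separate thread. A minimal usage sketch against the new test fixtures follows; it assumes the tests import the extraction module as geoextent.lib.extent (the module path edited in PATCH 4), and the expected values are taken from the assertions added in the patches above.

    import geoextent.lib.extent as geoextent  # assumed import path, matching geoextent/lib/extent.py

    # Spatial and temporal extent of the new KML fixture: 'bbox' is
    # [minlon, minlat, maxlon, maxlat] in WGS84 and 'crs' is reported
    # alongside it as "4326".
    kml = geoextent.fromFile("tests/testdata/kml/TimeStamp_example.kml", bbox=True, tbox=True)
    print(kml["bbox"], kml["crs"], kml["tbox"])

    # Temporal extent of the new GeoPackage fixture; per the added test
    # this should yield ['2021-01-05', '2021-01-05'].
    gpkg = geoextent.fromFile("tests/testdata/geopackage/wandelroute_maastricht.gpkg", tbox=True)
    print(gpkg["tbox"])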