tonyzzzzzz committed
Commit • 340638d • 1 parent: aa5319f
fix: async io error
main.py CHANGED
@@ -10,6 +10,7 @@ import pandas as pd
 import geopandas as gpd
 from pyproj.transformer import Transformer
 import cv2
+import asyncio
 from matplotlib import patches as mpatches
 from matplotlib import gridspec
 
@@ -75,18 +76,31 @@ def split_dataframe(df, chunk_size = 100):
         chunks.append(df[i*chunk_size:(i+1)*chunk_size])
     return chunks
 
-async def fetch(location, num_images, filter_undistort, disable_cam_filter, map_length, mpp):
+downloader = get_fpv.MapillaryDownloader(os.getenv("MLY_TOKEN"))
+loop = asyncio.get_event_loop()
+
+def generate_error_plot(error_message):
+    fig, ax = plt.subplots()
+    ax.text(0.5, 0.5, error_message, fontsize=12, va='center', ha='center', wrap=True)
+    ax.axis('off')
+
+    fig_img_path = 'fpv_bev.png'
+    fig.savefig(fig_img_path)
+    fig_img = plt.imread(fig_img_path)
+    return fig_img
+
+def fetch(location, num_images, filter_undistort, disable_cam_filter, map_length, mpp):
     TOTAL_LOOKED_INTO_LIMIT = 10000
 
     ################ FPV
-
+
     bbox = get_city_boundary(query=location)
     tiles = get_fpv.get_tiles_from_boundary(boundary_info=dict(bound_type="auto_bbox", bbox=bbox), zoom=14)
     np.random.shuffle(tiles)
     total_looked_into = 0
     dfs_meta = list()
     for tile in tiles:
-        image_points_response =
+        image_points_response = loop.run_until_complete(downloader.get_tiles_image_points([tile]))
         if image_points_response is None:
             continue
         try:
@@ -98,7 +112,7 @@ async def fetch(location, num_images, filter_undistort, disable_cam_filter, map_
 
             for df in df_split:
                 image_ids = df["id"]
-                image_infos, num_fail =
+                image_infos, num_fail = loop.run_until_complete(get_fpv.fetch_image_infos(image_ids, downloader, infos_dir))
                 df_meta = get_fpv.geojson_feature_list_to_pandas(image_infos.values())
 
                 # Some standardization of the data
@@ -114,9 +128,8 @@ async def fetch(location, num_images, filter_undistort, disable_cam_filter, map_
                 if total_rows > num_images:
                     break
                 elif total_looked_into > TOTAL_LOOKED_INTO_LIMIT:
-
-                    "\nPlease rerun or run the data engine locally for bulk time consuming operations."
-                    return
+                    return generate_error_plot(f"Went through {total_looked_into} images and could not find images satisfying the filters."
+                                               "\nPlease rerun or run the data engine locally for bulk time consuming operations.")
             if total_rows > num_images:
                 break
         except:
@@ -153,7 +166,7 @@ async def fetch(location, num_images, filter_undistort, disable_cam_filter, map_
     metadata_fmt = "\n".join(desc)
     # yield metadata_fmt, None, None
     image_urls = list(df_meta.set_index("id")["thumb_2048_url"].items())
-    num_fail =
+    num_fail = loop.run_until_complete(get_fpv.fetch_images_pixels(image_urls, downloader, raw_image_dir))
     if num_fail > 0:
         logger.error(f"Failed to download {num_fail} images.")
 
@@ -310,7 +323,7 @@ async def fetch(location, num_images, filter_undistort, disable_cam_filter, map_
     fig_img = plt.imread(fig_img_path)
 
 
-
+    return fig_img
 
 filter_pipeline = filters.FilterPipeline.load_from_yaml("MapItAnywhere/mia/fpv/filter_pipelines/mia.yaml")
 filter_pipeline.verbose=False