update likes and downloads
- app.py +43 -20
- schema.sql +2 -0
app.py
CHANGED
@@ -142,15 +142,15 @@ async def get_all_new_models():
 
 async def sync_data():
     print("Fetching models")
-
-    print(f"Found {len(
+    all_models = await get_all_new_models()
+    print(f"Found {len(all_models)} models")
     # save list of all models for ids
     with open(DB_FOLDER / "models.json", "w") as f:
-        json.dump(
+        json.dump(all_models, f)
     # with open(DB_FOLDER / "models.json", "r") as f:
     #     new_models = json.load(f)
 
-    new_models_ids = [model['id'] for model in
+    new_models_ids = [model['id'] for model in all_models]
 
     # get existing models
     with database.get_db() as db:
@@ -159,23 +159,39 @@ async def sync_data():
         existing_models = [row['id'] for row in cursor.fetchall()]
     models_ids_to_add = list(set(new_models_ids) - set(existing_models))
     # find all models id to add from new_models
-    models = [model for model in
+    models = [model for model in all_models if model['id'] in models_ids_to_add]
 
     print(f"Found {len(models)} new models")
     for model in tqdm(models):
         model_id = model['id']
+        likes = model['likes']
+        downloads = model['downloads']
         model_card = fetch_model_card(model_id)
         images = await find_image_in_model_card(model_card)
         classifier = run_classifier(images)
         # update model row with image and classifier data
         with database.get_db() as db:
             cursor = db.cursor()
-            cursor.execute("INSERT INTO models(id, data) VALUES (?, ?)",
-                           [model_id,
-
-
-
-
+            cursor.execute("INSERT INTO models(id, data, likes, downloads) VALUES (?, ?, ?, ?)",
+                           [model_id,
+                            json.dumps({
+                                **model,
+                                "images": images,
+                                "class": classifier
+                            }),
+                            likes,
+                            downloads
+                            ])
+            db.commit()
+    print("Update likes and downloads")
+    for model in tqdm(all_models):
+        model_id = model['id']
+        likes = model['likes']
+        downloads = model['downloads']
+        with database.get_db() as db:
+            cursor = db.cursor()
+            cursor.execute("UPDATE models SET likes = ?, downloads = ? WHERE id = ?",
+                           [likes, downloads, model_id])
             db.commit()
 
     if (len(models) > 0):
@@ -214,27 +230,34 @@ class Sort(str, Enum):
 def get_page(page: int = 1, sort: Sort = Sort.trending):
     page = page if page > 0 else 1
     if sort == Sort.trending:
-        sort_query = "
+        sort_query = "((likes + downloads)/2) / MYPOWER((JULIANDAY('now') - JULIANDAY(datetime(json_extract(data, '$.lastModified')))) + 2, 1.5) DESC"
     elif sort == Sort.recent:
         sort_query = "datetime(json_extract(data, '$.lastModified')) DESC"
     elif sort == Sort.likes:
-        sort_query = "
+        sort_query = "likes DESC"
 
     with database.get_db() as db:
         cursor = db.cursor()
         cursor.execute(f"""
         SELECT *, COUNT(*) OVER() AS total
         FROM models
-        WHERE
+        WHERE likes > 4
        ORDER BY {sort_query}
         LIMIT {MAX_PAGE_SIZE} OFFSET {(page - 1) * MAX_PAGE_SIZE}
         """)
         results = cursor.fetchall()
         total = results[0]['total'] if results else 0
         total_pages = (total + MAX_PAGE_SIZE - 1) // MAX_PAGE_SIZE
+        models_data = []
+        for result in results:
+            data = json.loads(result['data'])
+            # update downloads and likes from db table
+            data['downloads'] = result['downloads']
+            data['likes'] = result['likes']
+            models_data.append(data)
 
     return {
-        "models":
+        "models": models_data,
         "totalPages": total_pages
     }
 
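Note on the trending sort above: MYPOWER is not a built-in SQLite function, so it presumably has to be registered as a user-defined function on every connection handed out by database.get_db(); that registration is not part of this diff. A minimal sketch, assuming plain sqlite3 connections and a hypothetical register_functions helper:

    import math
    import sqlite3

    def register_functions(db: sqlite3.Connection) -> None:
        # Expose math.pow to SQL as MYPOWER(base, exponent) so the trending
        # query can divide engagement by an age-based decay factor.
        db.create_function("MYPOWER", 2, math.pow)

With that in place, the trending score is roughly ((likes + downloads) / 2) / (days_since_last_modified + 2) ** 1.5, i.e. engagement damped by how long ago the model was last modified.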
@@ -244,8 +267,8 @@ def read_root():
     return "Just a bot to sync data from diffusers gallery"
 
 
-
-
-
-
-
+@app.on_event("startup")
+@repeat_every(seconds=60 * 60 * 24, wait_first=False)
+async def repeat_sync():
+    await sync_data()
+    return "Synced data to huggingface datasets"
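Note on repeat_sync above: repeat_every is not part of FastAPI itself but comes from the fastapi-utils package, so app.py presumably imports it along these lines (a sketch of the assumed imports and wiring, not necessarily what the repo uses verbatim):

    from fastapi import FastAPI
    from fastapi_utils.tasks import repeat_every

    app = FastAPI()

    async def sync_data() -> None:
        ...  # stands in for the real sync_data defined earlier in app.py

    @app.on_event("startup")
    @repeat_every(seconds=60 * 60 * 24, wait_first=False)  # fire at startup, then every 24 hours
    async def repeat_sync():
        await sync_data()

With wait_first=False the first sync runs as soon as the Space boots, and then once a day after that.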
schema.sql
CHANGED
@@ -5,6 +5,8 @@ BEGIN TRANSACTION;
 CREATE TABLE models (
     id TEXT PRIMARY KEY NOT NULL,
     data json,
+    likes INTEGER DEFAULT 0 NOT NULL,
+    downloads INTEGER DEFAULT 0 NOT NULL,
     created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
 );
 
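Note on the schema change: the diff only touches CREATE TABLE, so it only affects databases created from scratch; an existing SQLite file would presumably need a one-off migration along these lines (a sketch, not a script shipped with this commit):

    -- Add the new counter columns to an already-created models table.
    ALTER TABLE models ADD COLUMN likes INTEGER DEFAULT 0 NOT NULL;
    ALTER TABLE models ADD COLUMN downloads INTEGER DEFAULT 0 NOT NULL;

The zero defaults keep existing rows valid until the daily sync backfills real counts through the UPDATE loop added in app.py.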