import contextlib
import re
import tempfile
from functools import lru_cache
import gradio as gr
from git import Repo
from httpx import Client
from huggingface_hub import create_repo, upload_folder
from toolz import groupby
client = Client()
def clone_into_temp_dir(github_repo_url):
    """Clone *github_repo_url* into a fresh temporary directory.

    Returns a ``(git.Repo, tempfile.TemporaryDirectory)`` tuple; the caller
    is responsible for calling ``.cleanup()`` on the directory when done.
    """
    temp_dir = tempfile.TemporaryDirectory()
    # Pass the directory *path* (``.name``) — not the TemporaryDirectory
    # object itself — since GitPython expects a str/PathLike destination.
    return Repo.clone_from(github_repo_url, temp_dir.name), temp_dir
# NOTE(review): this clone runs a network operation at import time and its
# temporary directory is never cleaned up — consider moving it behind a
# function or `if __name__ == "__main__":` guard.  A second, identical call
# whose result was discarded (an extra clone + leaked temp dir) was removed.
repo = clone_into_temp_dir("https://github.com/chen-zichen/XplainLLM_dataset/")
def upload_directory_to_hf(
    repo_id: str,
    directory: str,
    token: str,
    private: bool = False,
):
    """Create (or reuse) a Hugging Face dataset repo and upload *directory*.

    Args:
        repo_id: Target dataset repository id, e.g. ``"user/name"``.
        directory: Local folder whose contents are uploaded under ``data/``.
        token: Hugging Face API token with write access.
        private: Create the repository as private when it does not exist yet.

    Returns:
        The URL of the upload commit (previously computed but discarded).
    """
    # exist_ok=True makes repeated migrations to the same repo idempotent.
    create_repo(
        repo_id,
        token=token,
        exist_ok=True,
        repo_type="dataset",
        private=private,
    )
    return upload_folder(
        folder_path=directory,
        path_in_repo="data",
        repo_id=repo_id,
        repo_type="dataset",
        token=token,
        commit_message="Migrated from GitHub",
        # Skip git metadata, READMEs, macOS cruft, and .env secrets.
        ignore_patterns=[
            "*.git*",
            "*README.md*",
            "*.DS_Store",
            "*.env",
        ],
    )
def push_to_hf(
    source_github_repository, destination_hf_hub_repository, hf_token, subdirectory=None
):
    """Clone a GitHub repository and push its contents to a HF dataset repo.

    Args:
        source_github_repository: GitHub URL to clone.
        destination_hf_hub_repository: Target ``user/name`` dataset id.
        hf_token: Hugging Face API token with write access.
        subdirectory: Optional list from the UI dropdown; only the first
            entry is used as the folder to upload instead of the repo root.

    Returns:
        A markdown link to the created dataset on the Hub.
    """
    gr.Info("Cloning source GitHub repository...")
    repo, temporary_directory = clone_into_temp_dir(source_github_repository)
    gr.Info("Cloning source GitHub repository...Done")
    try:
        gr.Info("Syncing with Hugging Face Hub...")
        if subdirectory:
            src_directory = f"{repo.working_dir}/{subdirectory[0]}"
        else:
            src_directory = repo.working_dir
        upload_directory_to_hf(
            repo_id=destination_hf_hub_repository,
            directory=src_directory,
            token=hf_token,
            private=False,
        )
        gr.Info("Syncing with Hugging Face Hub...Done")
    finally:
        # Clean up the clone even when the upload fails (was leaked on error).
        temporary_directory.cleanup()
    # BUGFIX: the link previously lacked the "/" after "datasets".
    return f"Pushed the dataset to [{destination_hf_hub_repository}](https://huggingface.co/datasets/{destination_hf_hub_repository})"
def extract_user_name_and_repo_from_url(github_url: str):
    """Return ``(user, repo)`` parsed from a GitHub URL, or ``None``.

    Accepts plain repo URLs with or without a trailing slash, and also
    ``...repo.git`` clone URLs (the ``.git`` suffix is stripped so the
    GitHub API lookup uses the bare repository name).
    """
    pattern = r"https://github.com/([^/]+)/([^/]+)"
    if match := re.search(pattern, github_url):
        user_name, repo_name = match[1], match[2]
        # Strip a trailing ".git" (clone URLs) for the API-facing repo name.
        if repo_name.endswith(".git"):
            repo_name = repo_name[:-4]
        return user_name, repo_name
    print("No match found in the GitHub URL.")
    return None
def get_files_and_directories(response):
    """Split a GitHub git-trees API response into file and directory paths.

    Args:
        response: An httpx-style response whose JSON body contains a
            ``"tree"`` list of entries with ``"type"`` ("blob" for files,
            "tree" for directories) and ``"path"`` keys.

    Returns:
        ``{"files": [...], "directories": [...]}`` with entry paths; other
        entry types (e.g. submodule "commit" entries) are ignored.
    """
    # Plain comprehensions replace the toolz.groupby + redundant truthiness
    # guards: filtering an empty tree already yields empty lists.
    tree = response.json()["tree"]
    files = [item["path"] for item in tree if item["type"] == "blob"]
    directories = [item["path"] for item in tree if item["type"] == "tree"]
    return {"files": files, "directories": directories}
@lru_cache(maxsize=128)
def list_git_repo_files_and_directories(repo_url: str, branch: str = "main"):
    """Fetch the git tree of *repo_url* at *branch* via the GitHub API.

    Results are memoized per ``(repo_url, branch)``.  Returns the dict
    produced by ``get_files_and_directories``, or ``None`` when the URL
    cannot be parsed or the API request does not return HTTP 200.
    """
    parsed = extract_user_name_and_repo_from_url(repo_url)
    if parsed is None:
        return None
    owner, name = parsed
    api_url = f"https://api.github.com/repos/{owner}/{name}/git/trees/{branch}"
    response = client.get(api_url)
    if response.status_code != 200:
        return None
    return get_files_and_directories(response)
def show_files_and_directories(url: str):
    """Build the directory/file dropdowns for the GitHub URL typed in the UI.

    Args:
        url: The GitHub repository URL entered by the user.

    Returns:
        A ``(directories_dropdown, files_dropdown)`` pair.  On any failure
        (bad URL, network error, non-200 API response) the dropdowns are
        returned with empty choices — the previous blanket
        ``contextlib.suppress(Exception)`` made the function return ``None``
        instead of the expected two components.
    """
    try:
        listing = list_git_repo_files_and_directories(url)
    except Exception:
        # Failures are expected while the user is still typing the URL;
        # degrade to empty choices rather than breaking the UI callback.
        listing = None
    if listing is None:
        # Lookup returned None (unparsable URL / non-200): same fallback.
        listing = {}
    directories = listing.get("directories", [])
    files = listing.get("files", [])
    return gr.Dropdown(
        label="Directories",
        choices=directories,
        max_choices=1,
        visible=True,
        interactive=True,
        multiselect=True,
    ), gr.Dropdown(
        label="Files",
        choices=files,
        max_choices=None,
        visible=True,
        interactive=True,
        multiselect=True,
    )
html_text_app_description = """
Whilst GitHub is great for hosting code the Hugging Face Datasets Hub is a better place to host datasets.
Some of the benefits of hosting datasets on the Hugging Face Datasets Hub are: