|
import os |
|
import subprocess |
|
from openpyxl import Workbook |
|
from git import Repo |
|
import os |
|
import shutil |
|
import tempfile |
|
import uuid |
|
import re |
|
# Matches, in one left-to-right pass: line comments, block comments, string
# literals, and character literals. Matching literals alongside comments keeps
# comment-like text inside strings (e.g. "http://example.com") intact.
_CPP_COMMENT_OR_LITERAL_RE = re.compile(
    r'//[^\n]*'                # line comment
    r'|/\*[\s\S]*?\*/'         # block comment (possibly multi-line)
    r'|"(?:\\.|[^"\\\n])*"'    # string literal, escape-aware
    r"|'(?:\\.|[^'\\\n])*'"    # character literal, escape-aware
)


def remove_cpp_comments(code):
    """
    Strip C/C++ comments from *code* and drop the resulting blank lines.

    Line (//) and block (/* */) comments are removed; string and character
    literals are preserved verbatim, so a "//" inside a string is not
    mistaken for a comment. Lines left empty (or whitespace-only) after
    removal are discarded.

    Args:
        code (str): C/C++ source text.

    Returns:
        str: The source with comments and blank lines removed.
    """
    def _strip(match):
        token = match.group(0)
        # Comments vanish; literals are echoed back unchanged.
        return '' if token.startswith(('//', '/*')) else token

    code = _CPP_COMMENT_OR_LITERAL_RE.sub(_strip, code)
    return '\n'.join(line for line in code.splitlines() if line.strip())
|
|
|
def process_dataframe(df):
    """
    Strip C/C++ comments from the code columns of *df*.

    The 'Code' and 'Unit Test - (Ground Truth)' columns are rewritten via
    remove_cpp_comments; the (mutated) frame is returned for chaining.
    """
    for column in ('Code', 'Unit Test - (Ground Truth)'):
        df[column] = df[column].apply(remove_cpp_comments)
    return df
|
|
|
|
|
# NOTE(review): appears unused — nothing in this file reads or increments it;
# row IDs are generated with uuid.uuid4() instead. Candidate for removal.
id_counter = 0
|
|
|
def get_remote_default_branch(repo_url):
    """
    Determine the default branch of a remote Git repository.

    Shallow-clones the repository into a temporary directory, reads the
    active branch name, and always cleans the temporary clone up.

    Args:
        repo_url: URL of the Git repository.

    Returns:
        str | None: The default branch name, or None if the clone fails.
    """
    temp_dir = None  # bound before `try` so `finally` cannot hit a NameError
    try:
        temp_dir = tempfile.mkdtemp()
        # depth=1: only the tip commit is needed to read the branch name.
        repo = Repo.clone_from(repo_url, temp_dir, depth=1)
        return repo.active_branch.name
    except Exception as e:
        # Best-effort lookup: report and signal failure with None.
        print(f"Error: {e}")
        return None
    finally:
        if temp_dir is not None:
            # ignore_errors: a cleanup failure (e.g. read-only .git files on
            # Windows) must not mask the function's result.
            shutil.rmtree(temp_dir, ignore_errors=True)
|
|
|
def clone_repo(repo_url: str, local_path: str) -> None:
    """
    Clone a Git repository to a local path if it doesn't already exist.

    The clone is skipped entirely when *local_path* exists, so repeated
    runs reuse the checkout already on disk.

    Args:
        repo_url (str): The URL of the Git repository.
        local_path (str): The local path where the repository should be cloned.

    Raises:
        subprocess.CalledProcessError: If `git clone` exits non-zero.
    """
    if not os.path.exists(local_path):
        # check=True surfaces clone failures instead of continuing silently.
        subprocess.run(['git', 'clone', repo_url, local_path], check=True)
|
|
|
def get_commit_hash(repo_path: str) -> str:
    """
    Return the commit hash that HEAD currently points to in a repository.

    Args:
        repo_path (str): The local path of the repository.

    Returns:
        str: The latest commit hash.

    Raises:
        subprocess.CalledProcessError: If `git rev-parse` exits non-zero.
    """
    rev_parse_cmd = ['git', '-C', repo_path, 'rev-parse', 'HEAD']
    completed = subprocess.run(rev_parse_cmd, stdout=subprocess.PIPE, check=True)
    return completed.stdout.decode('utf-8').strip()
|
|
|
def find_files(directory: str, extensions: list) -> dict:
    """
    Find files with specific extensions within a directory tree.

    A file whose name matches several of the requested suffixes (e.g.
    "foo_test.cc" against both ".cc" and "_test.cc") is recorded under
    every matching suffix.

    Args:
        directory (str): The directory to search within.
        extensions (list): A list of file extensions to search for.

    Returns:
        dict: A dictionary where keys are file extensions and values are
            lists of matching file paths.
    """
    matches = {suffix: [] for suffix in extensions}
    for dirpath, _dirnames, filenames in os.walk(directory):
        for filename in filenames:
            for suffix in (s for s in extensions if filename.endswith(s)):
                matches[suffix].append(os.path.join(dirpath, filename))
    return matches
|
|
|
def group_files_by_basename(files_dict: dict) -> dict:
    """
    Group files by their base name, associating code files with their
    corresponding test files.

    Only groups that contain at least one test file ('_test.cc' or
    '_unittest.cc') are returned.

    Args:
        files_dict (dict): A dictionary of lists of file paths, categorized
            by extension suffix.

    Returns:
        dict: A dictionary where keys are base names and values are
            dictionaries mapping extension suffix -> file path.
    """
    grouped_files = {}

    def _strip_suffix(name: str, suffix: str) -> str:
        # Remove only the trailing suffix; str.replace() would also clobber
        # interior occurrences (e.g. 'a.cc_b.cc' would become 'a_b').
        return name[:-len(suffix)] if name.endswith(suffix) else name

    # Code suffixes first, then test suffixes — same insertion order as the
    # original two-pass implementation.
    for ext in ('.cc', '.h', '_test.cc', '_unittest.cc'):
        for file_path in files_dict.get(ext, []):
            base_name = _strip_suffix(os.path.basename(file_path), ext)
            grouped_files.setdefault(base_name, {})[ext] = file_path

    # Discard groups without a unit test: they cannot form a code/test pair.
    return {
        base: group
        for base, group in grouped_files.items()
        if '_test.cc' in group or '_unittest.cc' in group
    }
|
|
|
def read_file_content(file_path: str) -> str:
    """
    Read the entire content of a text file as UTF-8.

    Args:
        file_path (str): The path to the file.

    Returns:
        str: The content of the file.

    Raises:
        OSError: If the file cannot be opened.
        UnicodeDecodeError: If the file is not valid UTF-8.
    """
    # Pin the encoding: relying on the platform default (e.g. cp1252 on
    # Windows) would mis-decode UTF-8 source files from these repositories.
    with open(file_path, 'r', encoding='utf-8') as file:
        return file.read()
|
|
|
def process_repo(repo_url: str, local_path: str, default_branch: str) -> list:
    """
    Process a repository by finding files, reading content, and generating
    data for each code/test file pair.

    Args:
        repo_url (str): The URL of the Git repository.
        local_path (str): The local path of the repository (relative name
            under the 'codebases' directory).
        default_branch (str): The default branch of the repository, used to
            build GitHub blob URLs.

    Returns:
        list: A list of file data dictionaries, one per code/test pair.
    """
    codebases_path = 'codebases'
    os.makedirs(codebases_path, exist_ok=True)

    repo_local_path = os.path.join(codebases_path, local_path)
    clone_repo(repo_url, repo_local_path)
    commit_hash = get_commit_hash(repo_local_path)

    extensions = ['.cc', '.h', '_test.cc', '_unittest.cc']
    files_dict = find_files(repo_local_path, extensions)
    grouped_files = group_files_by_basename(files_dict)

    # Hoist the repeated URL parsing: "owner/name" without the ".git" suffix.
    owner = repo_url.split("/")[-2]
    repo_short_name = repo_url.split("/")[-1].replace(".git", "")
    repo_full_name = f'{owner}/{repo_short_name}'
    blob_base_url = f'https://github.com/{repo_full_name}/blob/{default_branch}'

    file_data_list = []
    for base_name, file_group in grouped_files.items():
        # Prefer the implementation (.cc) over the header (.h) as the code file.
        if '.cc' in file_group:
            file_path = file_group['.cc']
        elif '.h' in file_group:
            file_path = file_group['.h']
        else:
            # Test-only group: no code file. The original code fell through
            # with file_path == "" and os.path.relpath('') raises ValueError,
            # so such groups are skipped instead.
            continue
        code_content = read_file_content(file_path) + "\n"

        # group_files_by_basename guarantees one of these keys is present.
        if '_test.cc' in file_group:
            unit_test_path = file_group['_test.cc']
        else:
            unit_test_path = file_group['_unittest.cc']
        unit_test_content = read_file_content(unit_test_path)

        relative_file_path = os.path.relpath(file_path, repo_local_path)
        relative_unit_test_path = os.path.relpath(unit_test_path, repo_local_path)

        file_data_list.append({
            'id': str(uuid.uuid4()),
            'language': 'cpp',
            'repository_name': repo_full_name,
            'file_name': base_name,
            'file_path_in_repository': relative_file_path,
            'file_path_for_unit_test': relative_unit_test_path,
            'Code': code_content.strip(),
            'Unit Test': unit_test_content.strip(),
            'Code Url': f'{blob_base_url}/{relative_file_path}',
            'Test Code Url': f'{blob_base_url}/{relative_unit_test_path}',
            'Commit Hash': commit_hash
        })

    return file_data_list
|
|
|
def save_dict_to_excel(data_dict: dict, output_file: str):
    """
    Write a dictionary to an Excel file, one key/value pair per row.

    Args:
        data_dict (dict): The dictionary to save, with keys in the first
            column and values in the second.
        output_file (str): The path to the output Excel file.
    """
    workbook = Workbook()
    sheet = workbook.active
    sheet.title = "Dictionary Data"

    # Header row, then one row per entry in insertion order.
    sheet.append(['Key', 'Value'])
    for entry in data_dict.items():
        sheet.append(list(entry))

    workbook.save(output_file)
    print(f"Dictionary has been written to {output_file}")
|
|
|
def save_to_excel(file_data_list: list, output_file: str):
    """
    Save the collected file data to an Excel file.

    Args:
        file_data_list (list): A list of dictionaries containing file data.
        output_file (str): The path to the output Excel file.
    """
    # (column header, record key) pairs, in output column order.
    columns = [
        ('ID', 'id'),
        ('Language', 'language'),
        ('Repository Name', 'repository_name'),
        ('File Name', 'file_name'),
        ('File Path in Repository', 'file_path_in_repository'),
        ('File Path for Unit Test', 'file_path_for_unit_test'),
        ('Code', 'Code'),
        ('Unit Test - (Ground Truth)', 'Unit Test'),
        ('Code Url', 'Code Url'),
        ('Test Code Url', 'Test Code Url'),
        ('Commit Hash', 'Commit Hash'),
    ]

    workbook = Workbook()
    sheet = workbook.active
    sheet.title = "Unit Test Data"

    sheet.append([header for header, _ in columns])
    for record in file_data_list:
        sheet.append([record[key] for _, key in columns])

    workbook.save(output_file)
    print(f"File data has been written to {output_file}")
|
|
|
def combine_repo_data(repo_urls: list):
    """
    Combine data from multiple repositories and save it to Excel files.

    For each repository URL: resolve the remote default branch, clone and
    process the repository, and collect per-file rows. The combined rows are
    written to 'combined_repo_data.xlsx' and a repository -> commit-hash map
    to 'repo_commit_map.xlsx'.

    Args:
        repo_urls (list): A list of Git repository URLs.
    """
    all_file_data = []
    repo_commit_map = {}

    for repo_url in repo_urls:
        repo_name = repo_url.split("/")[-1].replace(".git", "")
        default_branch = get_remote_default_branch(repo_url)
        print(repo_url)
        print(default_branch)
        # NOTE(review): default_branch is None when the remote lookup fails;
        # the generated blob URLs would then literally contain "None".
        file_data = process_repo(repo_url, repo_name, default_branch)
        all_file_data.extend(file_data)

        # Record the commit hash of the clone that was just processed.
        repo_commit_map[repo_name] = get_commit_hash(os.path.join('codebases', repo_name))

    output_file = 'combined_repo_data.xlsx'
    save_to_excel(all_file_data, output_file)

    print("Repository and Commit Hash Map:")
    for repo, commit_hash in repo_commit_map.items():
        print(f"{repo}: {commit_hash}")
    save_dict_to_excel(repo_commit_map, 'repo_commit_map.xlsx')
|
|
|
|
|
# Repositories to scrape for C++ source/unit-test pairs.
repo_urls = [
    'https://github.com/google/googletest.git',
    'https://github.com/google/libaddressinput.git',
    'https://github.com/abseil/abseil-cpp.git',
    'https://github.com/google/libphonenumber.git',
    'https://github.com/google/langsvr.git',
    'https://github.com/google/tensorstore.git',
    'https://github.com/google/arolla.git',
    'https://github.com/tensorflow/tensorflow.git',
    'https://github.com/google/glog.git',
    'https://github.com/google/leveldb.git',
    'https://github.com/google/tsl.git',
    'https://github.com/google/quiche.git',
    'https://github.com/google/cel-cpp.git'
]

if __name__ == '__main__':
    # Guard the entry point so importing this module doesn't start cloning
    # repositories and writing spreadsheets as a side effect.
    combine_repo_data(repo_urls)
|
|