{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# Convert\n", "Converts the NIST-In-Situ-IN625-LPBF-Overhangs dataset into a masked dataset: each frame is cropped to a 64 x 64 window centered on its hottest pixel, and a circular region around that pixel is masked out to produce `mask`, `masked_frame`, and `target` columns." ] }, { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "/home/ppak/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", " from .autonotebook import tqdm as notebook_tqdm\n" ] } ], "source": [ "import matplotlib.pyplot as plt\n", "import matplotlib.colors as colors\n", "import numpy as np\n", "import torch\n", "\n", "from datasets import load_dataset\n", "from tqdm import tqdm" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Downloading readme: 100%|██████████| 1.22k/1.22k [00:00<00:00, 3.49MB/s]\n" ] } ], "source": [ "dataset = load_dataset(\n", " \"ppak10/NIST-In-Situ-IN625-LPBF-Overhangs\",\n", " \"frames\",\n", " split = \"data\",\n", " num_proc = 20,\n", ")" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "['frame_index', 'folder_layer_range', 'part', 'part_section', 'process', 'source', 'supports', 'layer_number', 'build_time', 'contact_email', 'file_name', 'hatch_spacing', 'laser_power', 'layer_thickness', 'material', 'radiant_temp', 'raw_frame_number', 'resolution', 's_hvariable__a', 's_hvariable__b', 's_hvariable__c', 'scan_speed', 'website']\n" ] } ], "source": [ "print(dataset.column_names)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "['frame_index', 'layer_number', 'radiant_temp', 'scan_speed', 'laser_power']\n" ] } ], "source": [ "select_dataset = dataset.select_columns([\"frame_index\", \"layer_number\", \"radiant_temp\", \"scan_speed\", \"laser_power\"])\n", "print(select_dataset.column_names)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Side length (in pixels) of the cropped frames.\n", "image_size = 64" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "def create_spherical_mask(shape, center, radius):\n", " \"\"\"\n", " Create a spherical (circular, in 2D) mask.\n", "\n", " Parameters:\n", " - shape: Tuple specifying the shape of the mask (e.g., (height, width))\n", " - center: Tuple specifying the center of the sphere (e.g., (center_y, center_x))\n", " - radius: Radius of the sphere\n", "\n", " Returns:\n", " - mask: NumPy integer array that is 0 inside the sphere and 1 outside\n", " \"\"\"\n", " # Create grid of coordinates\n", " y, x = np.ogrid[:shape[0], :shape[1]]\n", " \n", " # Calculate distance from each point to the center\n", " distance_from_center = np.sqrt((x - center[1])**2 + (y - center[0])**2)\n", " \n", " # Create mask where values outside the sphere are set to 1 and values inside to 0\n", " mask = distance_from_center > radius\n", " \n", " return mask.astype(int)" ] },
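{ "cell_type": "markdown", "metadata": {}, "source": [ "Illustrative check of `create_spherical_mask` (the shape, center, and radius below are arbitrary and are not part of the conversion pipeline): pixels within `radius` of `center` are 0 (masked out) and all other pixels are 1." ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Illustrative sanity check with an arbitrary small mask (not used in the pipeline):\n", "# pixels within the radius are 0 (masked out), pixels outside are 1.\n", "print(create_spherical_mask((8, 8), (3, 3), 2))" ] },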
{ "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "def crop_and_mask(example):\n", " # Convert radiant temperature to numpy array.\n", " frame = np.array(example[\"radiant_temp\"])\n", "\n", " # Find the location of the maximum temperature\n", " y, x = np.unravel_index(frame.argmax(), frame.shape)\n", "\n", " # Compute the cropping boundaries centered on the hottest pixel\n", " x_crop_start_index = x - image_size // 2\n", " x_crop_stop_index = x + image_size // 2\n", " y_crop_start_index = y - image_size // 2\n", " y_crop_stop_index = y + image_size // 2\n", "\n", " # Clamp the cropping boundaries so they stay within the frame\n", " x_crop_start = max(0, x_crop_start_index)\n", " x_crop_stop = min(x_crop_stop_index, frame.shape[1])\n", " y_crop_start = max(0, y_crop_start_index)\n", " y_crop_stop = min(y_crop_stop_index, frame.shape[0])\n", "\n", " # Negative values indicate how much zero padding is needed on each side\n", " x_crop_start_padding = min(0, x_crop_start_index)\n", " x_crop_stop_padding = min(0, frame.shape[1] - x_crop_stop_index)\n", " y_crop_start_padding = min(0, y_crop_start_index)\n", " y_crop_stop_padding = min(0, frame.shape[0] - y_crop_stop_index)\n", "\n", " # Helpers that zero-pad the cropped frame on a given side\n", " def add_zeros_top(array, num_rows):\n", " return np.concatenate((np.zeros((num_rows, array.shape[1])), array), axis=0)\n", "\n", " def add_zeros_left(array, num_cols):\n", " return np.concatenate((np.zeros((array.shape[0], num_cols)), array), axis=1)\n", "\n", " def add_zeros_right(array, num_cols):\n", " return np.concatenate((array, np.zeros((array.shape[0], num_cols))), axis=1)\n", "\n", " def add_zeros_bottom(array, num_rows):\n", " return np.concatenate((array, np.zeros((num_rows, array.shape[1]))), axis=0)\n", "\n", " # Crop the frame, then zero-pad any side where the crop extended past the frame\n", " frame_cropped = frame[y_crop_start:y_crop_stop, x_crop_start:x_crop_stop]\n", " frame_cropped = add_zeros_top(frame_cropped, abs(y_crop_start_padding))\n", " frame_cropped = add_zeros_bottom(frame_cropped, abs(y_crop_stop_padding))\n", " frame_cropped = add_zeros_left(frame_cropped, abs(x_crop_start_padding))\n", " frame_cropped = add_zeros_right(frame_cropped, abs(x_crop_stop_padding))\n", "\n", " # Replace 0s with 1s so that the value 0 is reserved for masked-out pixels\n", " frame_cropped = np.where(frame_cropped == 0, 1, frame_cropped).astype(int)\n", "\n", " # Find the index of the max temperature within the cropped frame\n", " cropped_y, cropped_x = np.unravel_index(frame_cropped.argmax(), frame_cropped.shape)\n", "\n", " # Mask parameters\n", " shape = (image_size, image_size) # Shape of the mask\n", " center = (cropped_y, cropped_x) # Center the mask on the hottest pixel\n", " radius = 5 # Radius (in pixels) of the masked-out region\n", " \n", " # Create spherical mask and apply it to the cropped frame\n", " mask = create_spherical_mask(shape, center, radius)\n", " masked_frame = frame_cropped * mask\n", "\n", " example[\"mask\"] = mask\n", " example[\"masked_frame\"] = masked_frame\n", " example[\"target\"] = frame_cropped\n", " example[\"radius\"] = radius\n", " example[\"center\"] = center\n", " return example\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Map (num_proc=20): 50%|████▉ | 349980/706074 [12:21<12:34, 472.14 examples/s] \n" ] }, { "ename": "OSError", "evalue": "[Errno 28] No space left on device", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mRemoteTraceback\u001b[0m Traceback (most recent call last)", "\u001b[0;31mRemoteTraceback\u001b[0m: \n\"\"\"\nTraceback (most recent call last):\n File 
\"/home/ppak/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/datasets/arrow_dataset.py\", line 3533, in _map_single\n writer.write(example)\n File \"/home/ppak/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/datasets/arrow_writer.py\", line 500, in write\n self.write_examples_on_file()\n File \"/home/ppak/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/datasets/arrow_writer.py\", line 458, in write_examples_on_file\n self.write_batch(batch_examples=batch_examples)\n File \"/home/ppak/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/datasets/arrow_writer.py\", line 572, in write_batch\n self.write_table(pa_table, writer_batch_size)\n File \"/home/ppak/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/datasets/arrow_writer.py\", line 590, in write_table\n self.pa_writer.write_table(pa_table, writer_batch_size)\n File \"pyarrow/ipc.pxi\", line 529, in pyarrow.lib._CRecordBatchWriter.write_table\n File \"pyarrow/error.pxi\", line 88, in pyarrow.lib.check_status\n File \"/home/ppak/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/fsspec/implementations/local.py\", line 389, in write\n return self.f.write(*args, **kwargs)\nOSError: [Errno 28] No space left on device\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/home/ppak/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/datasets/arrow_dataset.py\", line 3582, in _map_single\n writer.finalize()\n File \"/home/ppak/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/datasets/arrow_writer.py\", line 599, in finalize\n self.write_examples_on_file()\n File \"/home/ppak/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/datasets/arrow_writer.py\", line 458, in write_examples_on_file\n self.write_batch(batch_examples=batch_examples)\n File \"/home/ppak/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/datasets/arrow_writer.py\", line 572, in write_batch\n self.write_table(pa_table, writer_batch_size)\n File \"/home/ppak/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/datasets/arrow_writer.py\", line 590, in write_table\n self.pa_writer.write_table(pa_table, writer_batch_size)\n File \"pyarrow/ipc.pxi\", line 529, in pyarrow.lib._CRecordBatchWriter.write_table\n File \"pyarrow/error.pxi\", line 88, in pyarrow.lib.check_status\n File \"/home/ppak/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/fsspec/implementations/local.py\", line 389, in write\n return self.f.write(*args, **kwargs)\nOSError: [Errno 28] No space left on device\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File \"/home/ppak/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/multiprocess/pool.py\", line 125, in worker\n result = (True, func(*args, **kwds))\n File \"/home/ppak/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/datasets/utils/py_utils.py\", line 675, in _write_generator_to_queue\n for i, result in enumerate(func(**kwargs)):\n File \"/home/ppak/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/datasets/arrow_dataset.py\", line 3587, in _map_single\n raise\n File \"/usr/lib/python3.8/contextlib.py\", line 
525, in __exit__\n raise exc_details[1]\n File \"/usr/lib/python3.8/contextlib.py\", line 510, in __exit__\n if cb(*exc_details):\n File \"/home/ppak/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/datasets/arrow_writer.py\", line 369, in __exit__\n self.close()\n File \"/home/ppak/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/datasets/arrow_writer.py\", line 379, in close\n self.stream.close() # This also closes self.pa_writer if it is opened\n File \"/home/ppak/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/fsspec/implementations/local.py\", line 407, in close\n return self.f.close()\nOSError: [Errno 28] No space left on device\n\"\"\"", "\nThe above exception was the direct cause of the following exception:\n", "\u001b[0;31mOSError\u001b[0m Traceback (most recent call last)", "Cell \u001b[0;32mIn[28], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m converted_dataset \u001b[38;5;241m=\u001b[39m \u001b[43mselect_dataset\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmap\u001b[49m\u001b[43m(\u001b[49m\u001b[43mcrop_and_mask\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mnum_proc\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m20\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 2\u001b[0m \u001b[38;5;28mprint\u001b[39m(converted_dataset\u001b[38;5;241m.\u001b[39mcolumn_names)\n", "File \u001b[0;32m~/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/datasets/arrow_dataset.py:602\u001b[0m, in \u001b[0;36mtransmit_tasks..wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 600\u001b[0m \u001b[38;5;28mself\u001b[39m: \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mDataset\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;241m=\u001b[39m kwargs\u001b[38;5;241m.\u001b[39mpop(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mself\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 601\u001b[0m \u001b[38;5;66;03m# apply actual function\u001b[39;00m\n\u001b[0;32m--> 602\u001b[0m out: Union[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mDataset\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mDatasetDict\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 603\u001b[0m datasets: List[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mDataset\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mlist\u001b[39m(out\u001b[38;5;241m.\u001b[39mvalues()) \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(out, \u001b[38;5;28mdict\u001b[39m) \u001b[38;5;28;01melse\u001b[39;00m [out]\n\u001b[1;32m 604\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m dataset \u001b[38;5;129;01min\u001b[39;00m datasets:\n\u001b[1;32m 605\u001b[0m \u001b[38;5;66;03m# Remove task templates if a column mapping of the template is no longer valid\u001b[39;00m\n", "File \u001b[0;32m~/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/datasets/arrow_dataset.py:567\u001b[0m, in \u001b[0;36mtransmit_format..wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 560\u001b[0m self_format \u001b[38;5;241m=\u001b[39m {\n\u001b[1;32m 
561\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtype\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_format_type,\n\u001b[1;32m 562\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mformat_kwargs\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_format_kwargs,\n\u001b[1;32m 563\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcolumns\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_format_columns,\n\u001b[1;32m 564\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124moutput_all_columns\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_output_all_columns,\n\u001b[1;32m 565\u001b[0m }\n\u001b[1;32m 566\u001b[0m \u001b[38;5;66;03m# apply actual function\u001b[39;00m\n\u001b[0;32m--> 567\u001b[0m out: Union[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mDataset\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mDatasetDict\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 568\u001b[0m datasets: List[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mDataset\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mlist\u001b[39m(out\u001b[38;5;241m.\u001b[39mvalues()) \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(out, \u001b[38;5;28mdict\u001b[39m) \u001b[38;5;28;01melse\u001b[39;00m [out]\n\u001b[1;32m 569\u001b[0m \u001b[38;5;66;03m# re-apply format to the output\u001b[39;00m\n", "File \u001b[0;32m~/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/datasets/arrow_dataset.py:3248\u001b[0m, in \u001b[0;36mDataset.map\u001b[0;34m(self, function, with_indices, with_rank, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, suffix_template, new_fingerprint, desc)\u001b[0m\n\u001b[1;32m 3242\u001b[0m logger\u001b[38;5;241m.\u001b[39minfo(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mSpawning \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mnum_proc\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m processes\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 3243\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m hf_tqdm(\n\u001b[1;32m 3244\u001b[0m unit\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m examples\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 3245\u001b[0m total\u001b[38;5;241m=\u001b[39mpbar_total,\n\u001b[1;32m 3246\u001b[0m desc\u001b[38;5;241m=\u001b[39m(desc \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mMap\u001b[39m\u001b[38;5;124m\"\u001b[39m) \u001b[38;5;241m+\u001b[39m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m (num_proc=\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mnum_proc\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m)\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 3247\u001b[0m ) \u001b[38;5;28;01mas\u001b[39;00m pbar:\n\u001b[0;32m-> 3248\u001b[0m 
\u001b[38;5;28;01mfor\u001b[39;00m rank, done, content \u001b[38;5;129;01min\u001b[39;00m iflatmap_unordered(\n\u001b[1;32m 3249\u001b[0m pool, Dataset\u001b[38;5;241m.\u001b[39m_map_single, kwargs_iterable\u001b[38;5;241m=\u001b[39mkwargs_per_job\n\u001b[1;32m 3250\u001b[0m ):\n\u001b[1;32m 3251\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m done:\n\u001b[1;32m 3252\u001b[0m shards_done \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n", "File \u001b[0;32m~/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/datasets/utils/py_utils.py:715\u001b[0m, in \u001b[0;36miflatmap_unordered\u001b[0;34m(pool, func, kwargs_iterable)\u001b[0m\n\u001b[1;32m 712\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[1;32m 713\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m pool_changed:\n\u001b[1;32m 714\u001b[0m \u001b[38;5;66;03m# we get the result in case there's an error to raise\u001b[39;00m\n\u001b[0;32m--> 715\u001b[0m [async_result\u001b[38;5;241m.\u001b[39mget(timeout\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m0.05\u001b[39m) \u001b[38;5;28;01mfor\u001b[39;00m async_result \u001b[38;5;129;01min\u001b[39;00m async_results]\n", "File \u001b[0;32m~/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/datasets/utils/py_utils.py:715\u001b[0m, in \u001b[0;36m\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m 712\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[1;32m 713\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m pool_changed:\n\u001b[1;32m 714\u001b[0m \u001b[38;5;66;03m# we get the result in case there's an error to raise\u001b[39;00m\n\u001b[0;32m--> 715\u001b[0m [\u001b[43masync_result\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtimeout\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m0.05\u001b[39;49m\u001b[43m)\u001b[49m \u001b[38;5;28;01mfor\u001b[39;00m async_result \u001b[38;5;129;01min\u001b[39;00m async_results]\n", "File \u001b[0;32m~/HuggingFace/Datasets/Melt-Pool-Thermal-Images/venv/lib/python3.8/site-packages/multiprocess/pool.py:771\u001b[0m, in \u001b[0;36mApplyResult.get\u001b[0;34m(self, timeout)\u001b[0m\n\u001b[1;32m 769\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_value\n\u001b[1;32m 770\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m--> 771\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_value\n", "\u001b[0;31mOSError\u001b[0m: [Errno 28] No space left on device" ] } ], "source": [ "converted_dataset = select_dataset.map(crop_and_mask, num_proc=20)\n", "print(converted_dataset.column_names)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:03<00:00, 1.80ba/s]\n", "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:03<00:00, 1.79ba/s]\n", "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:03<00:00, 1.77ba/s]\n", "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:03<00:00, 1.85ba/s]\n", "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:03<00:00, 1.83ba/s]\n", "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:03<00:00, 1.86ba/s]\n", "Creating parquet from Arrow format: 100%|██████████| 6/6 [00:03<00:00, 1.91ba/s]\n", "Creating parquet from Arrow 
format: 100%|██████████| 6/6 [00:03<00:00, 1.93ba/s]\n", "Uploading the dataset shards: 100%|██████████| 129/129 [11:14<00:00, 5.23s/it]\n" ] }, { "data": { "text/plain": [
"CommitInfo(commit_url='https://huggingface.co/datasets/ppak10/Melt-Pool-Thermal-Images/commit/c5da9d5e99f950c9e241c747cd68fa8d81ddd873', commit_message='Upload dataset (part 00002-of-00003)', commit_description='', oid='c5da9d5e99f950c9e241c747cd68fa8d81ddd873', pr_url=None, pr_revision=None, pr_num=None)" ] }, "execution_count": 25, "metadata": {}, "output_type": "execute_result" } ], "source": [ "converted_dataset.push_to_hub(\n", " \"ppak10/Melt-Pool-Thermal-Images\",\n", " config_name = \"masked\",\n", " split = \"data\" \n", ")" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "venv", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.8.10" } }, "nbformat": 4, "nbformat_minor": 2 }