import functools
import gc
import logging
import os
import sys

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import streamlit as st
import torch
from IPython import get_ipython
from PIL import Image

# Directory and file used for application logging.
log_dir = 'my_model/utilities'
os.makedirs(log_dir, exist_ok=True)

logging.basicConfig(
    filename=os.path.join(log_dir, 'app.log'),
    level=logging.DEBUG,
    format='%(asctime)s - %(message)s',
)


def log_event(event: str) -> None:
    """
    Logs an event with the current timestamp.

    Args:
        event (str): The event description to log.
    """
    try:
        logging.info(event)
    except Exception as e:
        logging.error(f"Failed to log event: {e}")


def log_function_call(func):
    """
    Decorator that logs the function call details (name and arguments).

    Args:
        func (function): The function to be decorated.

    Returns:
        function: The wrapped function with logging.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        log_event(f'Function {func.__name__} called with args: {args} and kwargs: {kwargs}')
        result = func(*args, **kwargs)
        log_event(f'Function {func.__name__} completed')
        return result
    return wrapper
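

# Usage sketch (illustrative only; `fetch_data` is a hypothetical function, not part
# of this module). Decorating a function routes its call/completion messages through
# log_event into the configured app.log:
#
#     @log_function_call
#     def fetch_data(url, timeout=10):
#         ...
#
#     fetch_data("https://example.com", timeout=5)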


def show_image(image):
    """
    Display an image in various environments (Jupyter, PyCharm, Hugging Face Spaces).
    Handles different types of image inputs (file path, PIL Image, numpy array, OpenCV, PyTorch tensor).

    Args:
        image (str or PIL.Image or numpy.ndarray or torch.Tensor): The image to display.
    """
    in_jupyter = is_jupyter_notebook()
    in_colab = is_google_colab()

    if isinstance(image, str):
        # Treat strings as file paths.
        if os.path.isfile(image):
            image = Image.open(image)
        else:
            raise ValueError("File path provided does not exist.")
    elif isinstance(image, np.ndarray):
        if image.ndim == 3 and image.shape[2] in [3, 4]:
            # 3-channel arrays are assumed to be OpenCV-style BGR and are converted to RGB;
            # 4-channel arrays are passed through unchanged.
            image = Image.fromarray(image[..., ::-1] if image.shape[2] == 3 else image)
        else:
            # Grayscale or other array shapes.
            image = Image.fromarray(image)
    elif torch.is_tensor(image):
        # Convert a CHW tensor to HWC, moving it to the CPU first so .numpy() also works for CUDA tensors.
        image = Image.fromarray(image.permute(1, 2, 0).cpu().numpy().astype(np.uint8))

    if in_jupyter or in_colab:
        # Render inline inside notebooks and Colab.
        from IPython.display import display
        display(image)
    else:
        # Fall back to the default system image viewer.
        image.show()
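
# Illustrative calls (hypothetical inputs, not part of the original module):
#
#     show_image("images/sample.jpg")                         # path on disk
#     show_image(np.zeros((64, 64, 3), dtype=np.uint8))       # BGR NumPy array
#     show_image(torch.zeros(3, 64, 64, dtype=torch.uint8))   # CHW tensor
#
# In a notebook or Colab the image is rendered inline; elsewhere the system viewer opens.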


def show_image_with_matplotlib(image):
    """
    Display an image with matplotlib, accepting the same input types as show_image.

    Args:
        image (str or PIL.Image or numpy.ndarray or torch.Tensor): The image to display.
    """
    if isinstance(image, str):
        image = Image.open(image)
    elif isinstance(image, np.ndarray):
        image = Image.fromarray(image)
    elif torch.is_tensor(image):
        # Convert a CHW tensor to HWC on the CPU before building the PIL image.
        image = Image.fromarray(image.permute(1, 2, 0).cpu().numpy().astype(np.uint8))

    plt.imshow(image)
    plt.axis('off')
    plt.show()


def is_jupyter_notebook():
    """
    Check if the code is running in a Jupyter notebook.

    Returns:
        bool: True if running in a Jupyter notebook, False otherwise.
    """
    try:
        shell = get_ipython()
        if shell is None or 'IPKernelApp' not in shell.config:
            return False
        return 'ipykernel' in str(type(shell))
    except (NameError, AttributeError):
        return False


def is_pycharm():
    """Check whether the code is running inside PyCharm."""
    return 'PYCHARM_HOSTED' in os.environ


def is_google_colab():
    """Check whether the code is running inside Google Colab."""
    return 'COLAB_GPU' in os.environ or 'google.colab' in sys.modules


def get_image_path(name, path_type):
    """
    Generates a path for models, images, or data based on the specified type.

    Args:
        name (str): The name of the model, image, or data folder/file.
        path_type (str): The type of path needed ('models', 'images', or 'data').

    Returns:
        str: The full path to the specified resource.
    """
    # Resolve the resource folder relative to the parent of the current working directory.
    current_dir = os.getcwd()
    parent_dir = os.path.dirname(current_dir)
    folder_path = os.path.join(parent_dir, path_type)
    full_path = os.path.join(folder_path, name)
    return full_path
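
# Example (hypothetical paths): with the working directory at /project/app,
# get_image_path('cat.jpg', 'images') returns '/project/images/cat.jpg'.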


def get_model_path(model_name):
    """
    Get the path to the specified model folder.

    Args:
        model_name (str): Name of the model folder.

    Returns:
        str: Absolute path to the specified model folder.
    """
    # Climb two levels above this script's directory to reach the app root,
    # then descend into its "models" folder.
    current_script_dir = os.path.dirname(os.path.abspath(__file__))
    app_dir = os.path.dirname(os.path.dirname(current_script_dir))
    model_path = os.path.join(app_dir, "models", model_name)
    return model_path


def free_gpu_resources():
    """
    Clears GPU memory.
    """
    if torch.cuda.is_available():
        # Release cached CUDA memory and trigger Python garbage collection.
        torch.cuda.empty_cache()
        gc.collect()
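

# Minimal smoke test (illustrative only; not part of the original module). Running this
# file directly exercises the logging helpers and the GPU cleanup utility; the decorated
# helper below is a hypothetical example function.
if __name__ == "__main__":
    log_event("utilities self-test started")

    @log_function_call
    def _demo_add(a, b):
        return a + b

    _demo_add(2, 3)
    free_gpu_resources()
    log_event("utilities self-test finished")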