Upload folder using huggingface_hub
- .devcontainer/.vimrc +62 -0
- .devcontainer/Dockerfile +44 -0
- .devcontainer/devcontainer.json +65 -0
- .devcontainer/docker-compose.yml +28 -0
- .devcontainer/library-scripts/common-debian.sh +454 -0
- .devcontainer/library-scripts/docker-debian.sh +355 -0
- .dockerignore +12 -0
- .gitattributes +0 -34
- .gitignore +15 -0
- .gitmodules +0 -0
- Dockerfile +48 -0
- Makefile +7 -0
- README.md +6 -9
- app.py +245 -0
- download_pdfs.py +33 -0
- model/autodistill_best.pt +3 -0
- model/autodistill_best_seg.pt +3 -0
- model/solar_best.pt +3 -0
- post-devcontainer.sh +3 -0
- render_pdfs.py +32 -0
- requirements.txt +16 -0
- sandbox/autodistill-yolo.ipynb +0 -0
- sandbox/how-to-auto-train-yolov8-model-with-autodistill copy.ipynb +0 -0
- sandbox/mask_hell.ipynb +0 -0
- sandbox/sandbox.ipynb +181 -0
- solareyes/__init__.py +0 -0
- solareyes/sam.py +21 -0
.devcontainer/.vimrc
ADDED
@@ -0,0 +1,62 @@
+set nocompatible              " required
+filetype off                  " required
+
+" set the runtime path to include Vundle and initialize
+set rtp+=~/.vim/bundle/Vundle.vim
+call vundle#begin()
+
+" alternatively, pass a path where Vundle should install plugins
+"call vundle#begin('~/some/path/here')
+
+" let Vundle manage Vundle, required
+Plugin 'gmarik/Vundle.vim'
+
+" add all your plugins here (note older versions of Vundle
+" used Bundle instead of Plugin)
+Plugin 'vim-scripts/indentpython.vim'
+Bundle 'Valloric/YouCompleteMe'
+Plugin 'vim-syntastic/syntastic'
+Plugin 'nvie/vim-flake8'
+Plugin 'scrooloose/nerdtree'
+Plugin 'kien/ctrlp.vim'
+Plugin 'tpope/vim-fugitive'
+Plugin 'Lokaltog/powerline', {'rtp': 'powerline/bindings/vim/'}
+
+" ...
+
+" All of your Plugins must be added before the following line
+call vundle#end()            " required
+filetype plugin indent on    " required
+
+set splitbelow
+set splitright
+
+set foldmethod=indent
+set foldlevel=99
+
+nnoremap <space> za
+
+au BufNewFile,BufRead *.py
+    \ set tabstop=4 |
+    \ set softtabstop=4 |
+    \ set shiftwidth=4 |
+    \ set textwidth=79 |
+    \ set expandtab |
+    \ set autoindent |
+    \ set fileformat=unix
+
+au BufNewFile,BufRead *.js,*.html,*.css
+    \ set tabstop=2 |
+    \ set softtabstop=2 |
+    \ set shiftwidth=2
+
+set encoding=utf-8
+
+let g:ycm_autoclose_preview_window_after_completion=1
+map <leader>g  :YcmCompleter GoToDefinitionElseDeclaration<CR>
+
+set clipboard=unnamed
+set nu
+
+let python_highlight_all=1
+syntax on
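Note: the Vundle lines above assume Vundle itself is already present at ~/.vim/bundle/Vundle.vim. A minimal bootstrap sketch (an assumption, not something this commit does explicitly):

    git clone https://github.com/VundleVim/Vundle.vim ~/.vim/bundle/Vundle.vim
    vim +PluginInstall +qall

YouCompleteMe additionally needs a post-install compile step; see its README.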
.devcontainer/Dockerfile
ADDED
@@ -0,0 +1,44 @@
+FROM nvidia/cuda:12.3.1-devel-ubuntu22.04
+
+ARG USERNAME=vscode
+
+COPY library-scripts/common-debian.sh /tmp/library-scripts/
+RUN apt-get update && bash /tmp/library-scripts/common-debian.sh true automatic automatic automatic false true true
+
+
+RUN SNIPPET="export PROMPT_COMMAND='history -a' && export HISTFILE=/commandhistory/.bash_history" \
+    && mkdir -p /commandhistory \
+    && touch /commandhistory/.bash_history \
+    && chown -R $USERNAME /commandhistory \
+    && echo "$SNIPPET" >> "/home/$USERNAME/.bashrc"
+
+RUN SNIPPET="export PROMPT_COMMAND='history -a' && export HISTFILE=/commandhistory/.zsh_history" \
+    && mkdir -p /commandhistory \
+    && touch /commandhistory/.zsh_history \
+    && chown -R $USERNAME /commandhistory \
+    && echo "$SNIPPET" >> "/home/$USERNAME/.zshrc"
+
+RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
+    && apt-get -y install --no-install-recommends \
+    vim \
+    git \
+    git-lfs \
+    zsh \
+    tmux \
+    wget \
+    python3 \
+    python3-dev \
+    python3-pip \
+    python3-setuptools \
+    libgl1-mesa-dev \
+    libglib2.0-0
+
+RUN git lfs install
+
+USER vscode
+
+RUN python3 -m pip install --upgrade pip \
+    && pip install --no-cache-dir \
+    ultralytics \
+    pillow \
+    gradio
.devcontainer/devcontainer.json
ADDED
@@ -0,0 +1,65 @@
+// For format details, see https://aka.ms/devcontainer.json. For config options, see the
+// README at: https://github.com/devcontainers/templates/tree/main/src/python
+{
+    "name": "solar_eyes",
+
+    "dockerComposeFile": "docker-compose.yml",
+
+    "workspaceFolder": "/workspaces/solar_eyes",
+
+    "service": "solar_eyes",
+
+    // Features to add to the dev container. More info: https://containers.dev/features.
+    // "features": {},
+
+    // Use 'forwardPorts' to make a list of ports inside the container available locally.
+    // "forwardPorts": [],
+
+    // Uncomment the next line to use 'postCreateCommand' to run commands after the container is created.
+    // "postCreateCommand": "./post-devcontainer.sh",
+
+    // Configure tool-specific properties.
+    // Add the IDs of extensions you want installed when the container is created.
+    "customizations": {
+        "vscode": {
+            "extensions": [
+                "ms-azuretools.vscode-docker",
+                "ms-python.python",
+                "timonwong.shellcheck",
+                "GitHub.copilot",
+                "ms-toolsai.jupyter",
+                "mechatroner.rainbow-csv"
+            ],
+            "settings": {
+                "editor.tabSize": 4,
+                "terminal.integrated.defaultProfile.linux": "bash",
+                "terminal.integrated.profiles.linux": {
+                    "bash": {
+                        "path": "bash",
+                        "icon": "terminal-bash"
+                    },
+                    "zsh": {
+                        "path": "zsh"
+                    },
+                    "tmux": {
+                        "path": "tmux",
+                        "icon": "terminal-tmux"
+                    }
+                }
+            }
+        }
+    },
+
+    "features": {
+        "ghcr.io/devcontainers/features/github-cli:1": {},
+        "ghcr.io/mikaello/devcontainer-features/modern-shell-utils:1": {},
+        "ghcr.io/devcontainers/features/docker-outside-of-docker:1": {}
+    },
+
+    "remoteUser": "vscode",
+
+    "postStartCommand": "./post-devcontainer.sh",
+
+    "shutdownAction": "stopCompose"
+
+}
.devcontainer/docker-compose.yml
ADDED
@@ -0,0 +1,28 @@
+version: '3'
+
+services:
+  solar_eyes:
+    shm_size: '2gb'
+    init: true
+    build:
+      context: .
+      dockerfile: Dockerfile
+    volumes:
+      - ..:/workspaces/solar_eyes:cached
+      - /var/run/docker.sock:/var/run/docker-host.sock
+      - ~/.ssh:/home/vscode/.ssh:cached
+      - solar_eyes-bashhistory:/commandhistory
+    deploy:
+      resources:
+        reservations:
+          devices:
+            - driver: nvidia
+              capabilities: [gpu]
+    cap_add:
+      - SYS_PTRACE
+    security_opt:
+      - seccomp:unconfined
+    command: /bin/sh -c "while sleep 1000; do :; done"
+
+volumes:
+  solar_eyes-bashhistory:
.devcontainer/library-scripts/common-debian.sh
ADDED
@@ -0,0 +1,454 @@
+#!/usr/bin/env bash
+#-------------------------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
+#-------------------------------------------------------------------------------------------------------------
+#
+# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/common.md
+# Maintainer: The VS Code and Codespaces Teams
+#
+# Syntax: ./common-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages]
+
+set -e
+
+INSTALL_ZSH=${1:-"true"}
+USERNAME=${2:-"automatic"}
+USER_UID=${3:-"automatic"}
+USER_GID=${4:-"automatic"}
+UPGRADE_PACKAGES=${5:-"true"}
+INSTALL_OH_MYS=${6:-"true"}
+ADD_NON_FREE_PACKAGES=${7:-"false"}
+SCRIPT_DIR="$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)"
+MARKER_FILE="/usr/local/etc/vscode-dev-containers/common"
+
+if [ "$(id -u)" -ne 0 ]; then
+    echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
+    exit 1
+fi
+
+# Ensure that login shells get the correct path if the user updated the PATH using ENV.
+rm -f /etc/profile.d/00-restore-env.sh
+echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh
+chmod +x /etc/profile.d/00-restore-env.sh
+
+# If in automatic mode, determine if a user already exists, if not use vscode
+if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
+    USERNAME=""
+    POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
+    for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
+        if id -u ${CURRENT_USER} > /dev/null 2>&1; then
+            USERNAME=${CURRENT_USER}
+            break
+        fi
+    done
+    if [ "${USERNAME}" = "" ]; then
+        USERNAME=vscode
+    fi
+elif [ "${USERNAME}" = "none" ]; then
+    USERNAME=root
+    USER_UID=0
+    USER_GID=0
+fi
+
+# Load markers to see which steps have already run
+if [ -f "${MARKER_FILE}" ]; then
+    echo "Marker file found:"
+    cat "${MARKER_FILE}"
+    source "${MARKER_FILE}"
+fi
+
+# Ensure apt is in non-interactive to avoid prompts
+export DEBIAN_FRONTEND=noninteractive
+
+# Function to call apt-get if needed
+apt_get_update_if_needed()
+{
+    if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
+        echo "Running apt-get update..."
+        apt-get update
+    else
+        echo "Skipping apt-get update."
+    fi
+}
+
+# Run install apt-utils to avoid debconf warning then verify presence of other common developer tools and dependencies
+if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then
+
+    package_list="apt-utils \
+        openssh-client \
+        gnupg2 \
+        dirmngr \
+        iproute2 \
+        procps \
+        lsof \
+        htop \
+        net-tools \
+        psmisc \
+        curl \
+        wget \
+        rsync \
+        ca-certificates \
+        unzip \
+        zip \
+        nano \
+        vim-tiny \
+        less \
+        jq \
+        lsb-release \
+        apt-transport-https \
+        dialog \
+        libc6 \
+        libgcc1 \
+        libkrb5-3 \
+        libgssapi-krb5-2 \
+        libicu[0-9][0-9] \
+        liblttng-ust[0-9] \
+        libstdc++6 \
+        zlib1g \
+        locales \
+        sudo \
+        ncdu \
+        man-db \
+        strace \
+        manpages \
+        manpages-dev \
+        init-system-helpers"
+
+    # Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian
+    if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then
+        # Bring in variables from /etc/os-release like VERSION_CODENAME
+        . /etc/os-release
+        sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
+        sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb-src http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
+        sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
+        sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb-src http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
+        sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
+        sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
+        sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
+        sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
+        # Handle bullseye location for security https://www.debian.org/releases/bullseye/amd64/release-notes/ch-information.en.html
+        sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
+        sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
+        echo "Running apt-get update..."
+        apt-get update
+        package_list="${package_list} manpages-posix manpages-posix-dev"
+    else
+        apt_get_update_if_needed
+    fi
+
+    # Install libssl1.1 if available
+    if [[ ! -z $(apt-cache --names-only search ^libssl1.1$) ]]; then
+        package_list="${package_list} libssl1.1"
+    fi
+
+    # Install appropriate version of libssl1.0.x if available
+    libssl_package=$(dpkg-query -f '${db:Status-Abbrev}\t${binary:Package}\n' -W 'libssl1\.0\.?' 2>&1 || echo '')
+    if [ "$(echo "$libssl_package" | grep -o 'libssl1\.0\.[0-9]:' | uniq | sort | wc -l)" -eq 0 ]; then
+        if [[ ! -z $(apt-cache --names-only search ^libssl1.0.2$) ]]; then
+            # Debian 9
+            package_list="${package_list} libssl1.0.2"
+        elif [[ ! -z $(apt-cache --names-only search ^libssl1.0.0$) ]]; then
+            # Ubuntu 18.04, 16.04, earlier
+            package_list="${package_list} libssl1.0.0"
+        fi
+    fi
+
+    echo "Packages to verify are installed: ${package_list}"
+    apt-get -y install --no-install-recommends ${package_list} 2> >( grep -v 'debconf: delaying package configuration, since apt-utils is not installed' >&2 )
+
+    # Install git if not already installed (may be more recent than distro version)
+    if ! type git > /dev/null 2>&1; then
+        apt-get -y install --no-install-recommends git
+    fi
+
+    PACKAGES_ALREADY_INSTALLED="true"
+fi
+
+# Get to latest versions of all packages
+if [ "${UPGRADE_PACKAGES}" = "true" ]; then
+    apt_get_update_if_needed
+    apt-get -y upgrade --no-install-recommends
+    apt-get autoremove -y
+fi
+
+# Ensure at least the en_US.UTF-8 UTF-8 locale is available.
+# Common need for both applications and things like the agnoster ZSH theme.
+if [ "${LOCALE_ALREADY_SET}" != "true" ] && ! grep -o -E '^\s*en_US.UTF-8\s+UTF-8' /etc/locale.gen > /dev/null; then
+    echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen
+    locale-gen
+    LOCALE_ALREADY_SET="true"
+fi
+
+# Create or update a non-root user to match UID/GID.
+group_name="${USERNAME}"
+if id -u ${USERNAME} > /dev/null 2>&1; then
+    # User exists, update if needed
+    if [ "${USER_GID}" != "automatic" ] && [ "$USER_GID" != "$(id -g $USERNAME)" ]; then
+        group_name="$(id -gn $USERNAME)"
+        groupmod --gid $USER_GID ${group_name}
+        usermod --gid $USER_GID $USERNAME
+    fi
+    if [ "${USER_UID}" != "automatic" ] && [ "$USER_UID" != "$(id -u $USERNAME)" ]; then
+        usermod --uid $USER_UID $USERNAME
+    fi
+else
+    # Create user
+    if [ "${USER_GID}" = "automatic" ]; then
+        groupadd $USERNAME
+    else
+        groupadd --gid $USER_GID $USERNAME
+    fi
+    if [ "${USER_UID}" = "automatic" ]; then
+        useradd -s /bin/bash --gid $USERNAME -m $USERNAME
+    else
+        useradd -s /bin/bash --uid $USER_UID --gid $USERNAME -m $USERNAME
+    fi
+fi
+
+# Add sudo support for non-root user
+if [ "${USERNAME}" != "root" ] && [ "${EXISTING_NON_ROOT_USER}" != "${USERNAME}" ]; then
+    echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME
+    chmod 0440 /etc/sudoers.d/$USERNAME
+    EXISTING_NON_ROOT_USER="${USERNAME}"
+fi
+
+# ** Shell customization section **
+if [ "${USERNAME}" = "root" ]; then
+    user_rc_path="/root"
+else
+    user_rc_path="/home/${USERNAME}"
+fi
+
+# Restore user .bashrc defaults from skeleton file if it doesn't exist or is empty
+if [ ! -f "${user_rc_path}/.bashrc" ] || [ ! -s "${user_rc_path}/.bashrc" ] ; then
+    cp /etc/skel/.bashrc "${user_rc_path}/.bashrc"
+fi
+
+# Restore user .profile defaults from skeleton file if it doesn't exist or is empty
+if [ ! -f "${user_rc_path}/.profile" ] || [ ! -s "${user_rc_path}/.profile" ] ; then
+    cp /etc/skel/.profile "${user_rc_path}/.profile"
+fi
+
+# .bashrc/.zshrc snippet
+rc_snippet="$(cat << 'EOF'
+
+if [ -z "${USER}" ]; then export USER=$(whoami); fi
+if [[ "${PATH}" != *"$HOME/.local/bin"* ]]; then export PATH="${PATH}:$HOME/.local/bin"; fi
+
+# Display optional first run image specific notice if configured and terminal is interactive
+if [ -t 1 ] && [[ "${TERM_PROGRAM}" = "vscode" || "${TERM_PROGRAM}" = "codespaces" ]] && [ ! -f "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed" ]; then
+    if [ -f "/usr/local/etc/vscode-dev-containers/first-run-notice.txt" ]; then
+        cat "/usr/local/etc/vscode-dev-containers/first-run-notice.txt"
+    elif [ -f "/workspaces/.codespaces/shared/first-run-notice.txt" ]; then
+        cat "/workspaces/.codespaces/shared/first-run-notice.txt"
+    fi
+    mkdir -p "$HOME/.config/vscode-dev-containers"
+    # Mark first run notice as displayed after 10s to avoid problems with fast terminal refreshes hiding it
+    ((sleep 10s; touch "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed") &)
+fi
+
+# Set the default git editor if not already set
+if [ -z "$(git config --get core.editor)" ] && [ -z "${GIT_EDITOR}" ]; then
+    if [ "${TERM_PROGRAM}" = "vscode" ]; then
+        if [[ -n $(command -v code-insiders) && -z $(command -v code) ]]; then
+            export GIT_EDITOR="code-insiders --wait"
+        else
+            export GIT_EDITOR="code --wait"
+        fi
+    fi
+fi
+
+EOF
+)"
+
+# code shim: falls back to code-insiders if code is not available
+cat << 'EOF' > /usr/local/bin/code
+#!/bin/sh
+
+get_in_path_except_current() {
+    which -a "$1" | grep -A1 "$0" | grep -v "$0"
+}
+
+code="$(get_in_path_except_current code)"
+
+if [ -n "$code" ]; then
+    exec "$code" "$@"
+elif [ "$(command -v code-insiders)" ]; then
+    exec code-insiders "$@"
+else
+    echo "code or code-insiders is not installed" >&2
+    exit 127
+fi
+EOF
+chmod +x /usr/local/bin/code
+
+# systemctl shim - tells people to use 'service' if systemd is not running
+cat << 'EOF' > /usr/local/bin/systemctl
+#!/bin/sh
+set -e
+if [ -d "/run/systemd/system" ]; then
+    exec /bin/systemctl "$@"
+else
+    echo '\n"systemd" is not running in this container due to its overhead.\nUse the "service" command to start services instead. e.g.: \n\nservice --status-all'
+fi
+EOF
+chmod +x /usr/local/bin/systemctl
+
+# Codespaces bash and OMZ themes - partly inspired by https://github.com/ohmyzsh/ohmyzsh/blob/master/themes/robbyrussell.zsh-theme
+codespaces_bash="$(cat \
+<<'EOF'
+
+# Codespaces bash prompt theme
+__bash_prompt() {
+    local userpart='`export XIT=$? \
+        && [ ! -z "${GITHUB_USER}" ] && echo -n "\[\033[0;32m\]@${GITHUB_USER} " || echo -n "\[\033[0;32m\]\u " \
+        && [ "$XIT" -ne "0" ] && echo -n "\[\033[1;31m\]➜" || echo -n "\[\033[0m\]➜"`'
+    local gitbranch='`\
+        if [ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ]; then \
+            export BRANCH=$(git symbolic-ref --short HEAD 2>/dev/null || git rev-parse --short HEAD 2>/dev/null); \
+            if [ "${BRANCH}" != "" ]; then \
+                echo -n "\[\033[0;36m\](\[\033[1;31m\]${BRANCH}" \
+                && if git ls-files --error-unmatch -m --directory --no-empty-directory -o --exclude-standard ":/*" > /dev/null 2>&1; then \
+                    echo -n " \[\033[1;33m\]✗"; \
+                fi \
+                && echo -n "\[\033[0;36m\]) "; \
+            fi; \
+        fi`'
+    local lightblue='\[\033[1;34m\]'
+    local removecolor='\[\033[0m\]'
+    PS1="${userpart} ${lightblue}\w ${gitbranch}${removecolor}\$ "
+    unset -f __bash_prompt
+}
+__bash_prompt
+
+EOF
+)"
+
+codespaces_zsh="$(cat \
+<<'EOF'
+# Codespaces zsh prompt theme
+__zsh_prompt() {
+    local prompt_username
+    if [ ! -z "${GITHUB_USER}" ]; then
+        prompt_username="@${GITHUB_USER}"
+    else
+        prompt_username="%n"
+    fi
+    PROMPT="%{$fg[green]%}${prompt_username} %(?:%{$reset_color%}➜ :%{$fg_bold[red]%}➜ )" # User/exit code arrow
+    PROMPT+='%{$fg_bold[blue]%}%(5~|%-1~/…/%3~|%4~)%{$reset_color%} ' # cwd
+    PROMPT+='$([ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ] && git_prompt_info)' # Git status
+    PROMPT+='%{$fg[white]%}$ %{$reset_color%}'
+    unset -f __zsh_prompt
+}
+ZSH_THEME_GIT_PROMPT_PREFIX="%{$fg_bold[cyan]%}(%{$fg_bold[red]%}"
+ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%} "
+ZSH_THEME_GIT_PROMPT_DIRTY=" %{$fg_bold[yellow]%}✗%{$fg_bold[cyan]%})"
+ZSH_THEME_GIT_PROMPT_CLEAN="%{$fg_bold[cyan]%})"
+__zsh_prompt
+
+EOF
+)"
+
+# Add RC snippet and custom bash prompt
+if [ "${RC_SNIPPET_ALREADY_ADDED}" != "true" ]; then
+    echo "${rc_snippet}" >> /etc/bash.bashrc
+    echo "${codespaces_bash}" >> "${user_rc_path}/.bashrc"
+    echo 'export PROMPT_DIRTRIM=4' >> "${user_rc_path}/.bashrc"
+    if [ "${USERNAME}" != "root" ]; then
+        echo "${codespaces_bash}" >> "/root/.bashrc"
+        echo 'export PROMPT_DIRTRIM=4' >> "/root/.bashrc"
+    fi
+    chown ${USERNAME}:${group_name} "${user_rc_path}/.bashrc"
+    RC_SNIPPET_ALREADY_ADDED="true"
+fi
+
+# Optionally install and configure zsh and Oh My Zsh!
+if [ "${INSTALL_ZSH}" = "true" ]; then
+    if ! type zsh > /dev/null 2>&1; then
+        apt_get_update_if_needed
+        apt-get install -y zsh
+    fi
+    if [ "${ZSH_ALREADY_INSTALLED}" != "true" ]; then
+        echo "${rc_snippet}" >> /etc/zsh/zshrc
+        ZSH_ALREADY_INSTALLED="true"
+    fi
+
+    # Adapted, simplified inline Oh My Zsh! install steps that default to a codespaces theme.
+    # See https://github.com/ohmyzsh/ohmyzsh/blob/master/tools/install.sh for official script.
+    oh_my_install_dir="${user_rc_path}/.oh-my-zsh"
+    if [ ! -d "${oh_my_install_dir}" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then
+        template_path="${oh_my_install_dir}/templates/zshrc.zsh-template"
+        user_rc_file="${user_rc_path}/.zshrc"
+        umask g-w,o-w
+        mkdir -p ${oh_my_install_dir}
+        git clone --depth=1 \
+            -c core.eol=lf \
+            -c core.autocrlf=false \
+            -c fsck.zeroPaddedFilemode=ignore \
+            -c fetch.fsck.zeroPaddedFilemode=ignore \
+            -c receive.fsck.zeroPaddedFilemode=ignore \
+            "https://github.com/ohmyzsh/ohmyzsh" "${oh_my_install_dir}" 2>&1
+        echo -e "$(cat "${template_path}")\nDISABLE_AUTO_UPDATE=true\nDISABLE_UPDATE_PROMPT=true" > ${user_rc_file}
+        sed -i -e 's/ZSH_THEME=.*/ZSH_THEME="codespaces"/g' ${user_rc_file}
+
+        mkdir -p ${oh_my_install_dir}/custom/themes
+        echo "${codespaces_zsh}" > "${oh_my_install_dir}/custom/themes/codespaces.zsh-theme"
+        # Shrink git while still enabling updates
+        cd "${oh_my_install_dir}"
+        git repack -a -d -f --depth=1 --window=1
+        # Copy to non-root user if one is specified
+        if [ "${USERNAME}" != "root" ]; then
+            cp -rf "${user_rc_file}" "${oh_my_install_dir}" /root
+            chown -R ${USERNAME}:${group_name} "${user_rc_path}"
+        fi
+    fi
+fi
+
+# Persist image metadata info and script if meta.env found in same directory
+meta_info_script="$(cat << 'EOF'
+#!/bin/sh
+. /usr/local/etc/vscode-dev-containers/meta.env
+
+# Minimal output
+if [ "$1" = "version" ] || [ "$1" = "image-version" ]; then
+    echo "${VERSION}"
+    exit 0
+elif [ "$1" = "release" ]; then
+    echo "${GIT_REPOSITORY_RELEASE}"
+    exit 0
+elif [ "$1" = "content" ] || [ "$1" = "content-url" ] || [ "$1" = "contents" ] || [ "$1" = "contents-url" ]; then
+    echo "${CONTENTS_URL}"
+    exit 0
+fi
+
+# Full output
+echo
+echo "Development container image information"
+echo
+if [ ! -z "${VERSION}" ]; then echo "- Image version: ${VERSION}"; fi
+if [ ! -z "${DEFINITION_ID}" ]; then echo "- Definition ID: ${DEFINITION_ID}"; fi
+if [ ! -z "${VARIANT}" ]; then echo "- Variant: ${VARIANT}"; fi
+if [ ! -z "${GIT_REPOSITORY}" ]; then echo "- Source code repository: ${GIT_REPOSITORY}"; fi
+if [ ! -z "${GIT_REPOSITORY_RELEASE}" ]; then echo "- Source code release/branch: ${GIT_REPOSITORY_RELEASE}"; fi
+if [ ! -z "${BUILD_TIMESTAMP}" ]; then echo "- Timestamp: ${BUILD_TIMESTAMP}"; fi
+if [ ! -z "${CONTENTS_URL}" ]; then echo && echo "More info: ${CONTENTS_URL}"; fi
+echo
+EOF
+)"
+if [ -f "${SCRIPT_DIR}/meta.env" ]; then
+    mkdir -p /usr/local/etc/vscode-dev-containers/
+    cp -f "${SCRIPT_DIR}/meta.env" /usr/local/etc/vscode-dev-containers/meta.env
+    echo "${meta_info_script}" > /usr/local/bin/devcontainer-info
+    chmod +x /usr/local/bin/devcontainer-info
+fi
+
+# Write marker file
+mkdir -p "$(dirname "${MARKER_FILE}")"
+echo -e "\
+PACKAGES_ALREADY_INSTALLED=${PACKAGES_ALREADY_INSTALLED}\n\
+LOCALE_ALREADY_SET=${LOCALE_ALREADY_SET}\n\
+EXISTING_NON_ROOT_USER=${EXISTING_NON_ROOT_USER}\n\
+RC_SNIPPET_ALREADY_ADDED=${RC_SNIPPET_ALREADY_ADDED}\n\
+ZSH_ALREADY_INSTALLED=${ZSH_ALREADY_INSTALLED}" > "${MARKER_FILE}"
+
+echo "Done!"
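For reference, .devcontainer/Dockerfile above invokes this script as follows; per the Syntax line, the positional arguments map to:

    # INSTALL_ZSH=true, USERNAME/UID/GID=automatic, UPGRADE_PACKAGES=false,
    # INSTALL_OH_MYS=true, ADD_NON_FREE_PACKAGES=true
    bash /tmp/library-scripts/common-debian.sh true automatic automatic automatic false true true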
.devcontainer/library-scripts/docker-debian.sh
ADDED
@@ -0,0 +1,355 @@
+#!/usr/bin/env bash
+#-------------------------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
+#-------------------------------------------------------------------------------------------------------------
+#
+# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/docker.md
+# Maintainer: The VS Code and Codespaces Teams
+#
+# Syntax: ./docker-debian.sh [enable non-root docker socket access flag] [source socket] [target socket] [non-root user] [use moby] [CLI version] [Major version for docker-compose]
+
+ENABLE_NONROOT_DOCKER=${1:-"true"}
+SOURCE_SOCKET=${2:-"/var/run/docker-host.sock"}
+TARGET_SOCKET=${3:-"/var/run/docker.sock"}
+USERNAME=${4:-"automatic"}
+USE_MOBY=${5:-"true"}
+DOCKER_VERSION=${6:-"latest"}
+DOCKER_DASH_COMPOSE_VERSION=${7:-"v1"} # v1 or v2
+MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc"
+DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES="buster bullseye bionic focal jammy"
+DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES="buster bullseye bionic focal hirsute impish jammy"
+
+set -e
+
+if [ "$(id -u)" -ne 0 ]; then
+    echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
+    exit 1
+fi
+
+# Determine the appropriate non-root user
+if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
+    USERNAME=""
+    POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
+    for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
+        if id -u ${CURRENT_USER} > /dev/null 2>&1; then
+            USERNAME=${CURRENT_USER}
+            break
+        fi
+    done
+    if [ "${USERNAME}" = "" ]; then
+        USERNAME=root
+    fi
+elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then
+    USERNAME=root
+fi
+
+# Get central common setting
+get_common_setting() {
+    if [ "${common_settings_file_loaded}" != "true" ]; then
+        curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" 2>/dev/null -o /tmp/vsdc-settings.env || echo "Could not download settings file. Skipping."
+        common_settings_file_loaded=true
+    fi
+    if [ -f "/tmp/vsdc-settings.env" ]; then
+        local multi_line=""
+        if [ "$2" = "true" ]; then multi_line="-z"; fi
+        local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')"
+        if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi
+    fi
+    echo "$1=${!1}"
+}
+
+# Function to run apt-get if needed
+apt_get_update_if_needed()
+{
+    if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
+        echo "Running apt-get update..."
+        apt-get update
+    else
+        echo "Skipping apt-get update."
+    fi
+}
+
+# Checks if packages are installed and installs them if not
+check_packages() {
+    if ! dpkg -s "$@" > /dev/null 2>&1; then
+        apt_get_update_if_needed
+        apt-get -y install --no-install-recommends "$@"
+    fi
+}
+
+# Figure out correct version if a three-part version number is not passed
+find_version_from_git_tags() {
+    local variable_name=$1
+    local requested_version=${!variable_name}
+    if [ "${requested_version}" = "none" ]; then return; fi
+    local repository=$2
+    local prefix=${3:-"tags/v"}
+    local separator=${4:-"."}
+    local last_part_optional=${5:-"false"}
+    if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then
+        local escaped_separator=${separator//./\\.}
+        local last_part
+        if [ "${last_part_optional}" = "true" ]; then
+            last_part="(${escaped_separator}[0-9]+)?"
+        else
+            last_part="${escaped_separator}[0-9]+"
+        fi
+        local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$"
+        local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." | sort -rV)"
+        if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then
+            declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)"
+        else
+            set +e
+            declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")"
+            set -e
+        fi
+    fi
+    if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" > /dev/null 2>&1; then
+        echo -e "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2
+        exit 1
+    fi
+    echo "${variable_name}=${!variable_name}"
+}
+
+# Ensure apt is in non-interactive to avoid prompts
+export DEBIAN_FRONTEND=noninteractive
+
+# Install dependencies
+check_packages apt-transport-https curl ca-certificates gnupg2 dirmngr
+if ! type git > /dev/null 2>&1; then
+    apt_get_update_if_needed
+    apt-get -y install git
+fi
+
+# Source /etc/os-release to get OS info
+. /etc/os-release
+# Fetch host/container arch.
+architecture="$(dpkg --print-architecture)"
+
+# Check if distro is supported
+if [ "${USE_MOBY}" = "true" ]; then
+    # 'get_common_setting' allows attribute to be updated remotely
+    get_common_setting DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES
+    if [[ "${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then
+        err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, either: (1) set feature option '\"moby\": false' , or (2) choose a compatible OS distribution"
+        err "Supported distributions include: ${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}"
+        exit 1
+    fi
+    echo "Distro codename '${VERSION_CODENAME}' matched filter '${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}'"
+else
+    get_common_setting DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES
+    if [[ "${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then
+        err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, please choose a compatible OS distribution"
+        err "Supported distributions include: ${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}"
+        exit 1
+    fi
+    echo "Distro codename '${VERSION_CODENAME}' matched filter '${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}'"
+fi
+
+# Set up the necessary apt repos (either Microsoft's or Docker's)
+if [ "${USE_MOBY}" = "true" ]; then
+
+    cli_package_name="moby-cli"
+
+    # Import key safely and import Microsoft apt repo
+    get_common_setting MICROSOFT_GPG_KEYS_URI
+    curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg
+    echo "deb [arch=${architecture} signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/microsoft-${ID}-${VERSION_CODENAME}-prod ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/microsoft.list
+else
+    # Name of proprietary engine package
+    cli_package_name="docker-ce-cli"
+
+    # Import key safely and import Docker apt repo
+    curl -fsSL https://download.docker.com/linux/${ID}/gpg | gpg --dearmor > /usr/share/keyrings/docker-archive-keyring.gpg
+    echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/${ID} ${VERSION_CODENAME} stable" > /etc/apt/sources.list.d/docker.list
+fi
+
+# Refresh apt lists
+apt-get update
+
+# Soft version matching for CLI
+if [ "${DOCKER_VERSION}" = "latest" ] || [ "${DOCKER_VERSION}" = "lts" ] || [ "${DOCKER_VERSION}" = "stable" ]; then
+    # Empty, meaning grab whatever "latest" is in apt repo
+    cli_version_suffix=""
+else
+    # Fetch a valid version from the apt-cache (eg: the Microsoft repo appends +azure, breakfix, etc...)
+    docker_version_dot_escaped="${DOCKER_VERSION//./\\.}"
+    docker_version_dot_plus_escaped="${docker_version_dot_escaped//+/\\+}"
+    # Regex needs to handle debian package version number format: https://www.systutorials.com/docs/linux/man/5-deb-version/
+    docker_version_regex="^(.+:)?${docker_version_dot_plus_escaped}([\\.\\+ ~:-]|$)"
+    set +e # Don't exit if finding version fails - will handle gracefully
+    cli_version_suffix="=$(apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${docker_version_regex}")"
+    set -e
+    if [ -z "${cli_version_suffix}" ] || [ "${cli_version_suffix}" = "=" ]; then
+        echo "(!) No full or partial Docker / Moby version match found for \"${DOCKER_VERSION}\" on OS ${ID} ${VERSION_CODENAME} (${architecture}). Available versions:"
+        apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | grep -oP '^(.+:)?\K.+'
+        exit 1
+    fi
+    echo "cli_version_suffix ${cli_version_suffix}"
+fi
+
+# Install Docker / Moby CLI if not already installed
+if type docker > /dev/null 2>&1; then
+    echo "Docker / Moby CLI already installed."
+else
+    if [ "${USE_MOBY}" = "true" ]; then
+        apt-get -y install --no-install-recommends moby-cli${cli_version_suffix} moby-buildx
+        apt-get -y install --no-install-recommends moby-compose || echo "(*) Package moby-compose (Docker Compose v2) not available for OS ${ID} ${VERSION_CODENAME} (${architecture}). Skipping."
+    else
+        apt-get -y install --no-install-recommends docker-ce-cli${cli_version_suffix}
+        apt-get -y install --no-install-recommends docker-compose-plugin || echo "(*) Package docker-compose-plugin (Docker Compose v2) not available for OS ${ID} ${VERSION_CODENAME} (${architecture}). Skipping."
+    fi
+fi
+
+# Install Docker Compose if not already installed and is on a supported architecture
+if type docker-compose > /dev/null 2>&1; then
+    echo "Docker Compose already installed."
+else
+    TARGET_COMPOSE_ARCH="$(uname -m)"
+    if [ "${TARGET_COMPOSE_ARCH}" = "amd64" ]; then
+        TARGET_COMPOSE_ARCH="x86_64"
+    fi
+    if [ "${TARGET_COMPOSE_ARCH}" != "x86_64" ]; then
+        # Use pip to get a version that runs on this architecture
+        if ! dpkg -s python3-minimal python3-pip libffi-dev python3-venv > /dev/null 2>&1; then
+            apt_get_update_if_needed
+            apt-get -y install python3-minimal python3-pip libffi-dev python3-venv
+        fi
+        export PIPX_HOME=/usr/local/pipx
+        mkdir -p ${PIPX_HOME}
+        export PIPX_BIN_DIR=/usr/local/bin
+        export PYTHONUSERBASE=/tmp/pip-tmp
+        export PIP_CACHE_DIR=/tmp/pip-tmp/cache
+        pipx_bin=pipx
+        if ! type pipx > /dev/null 2>&1; then
+            pip3 install --disable-pip-version-check --no-cache-dir --user pipx
+            pipx_bin=/tmp/pip-tmp/bin/pipx
+        fi
+        ${pipx_bin} install --pip-args '--no-cache-dir --force-reinstall' docker-compose
+        rm -rf /tmp/pip-tmp
+    else
+        compose_v1_version="1"
+        find_version_from_git_tags compose_v1_version "https://github.com/docker/compose" "tags/"
+        echo "(*) Installing docker-compose ${compose_v1_version}..."
+        curl -fsSL "https://github.com/docker/compose/releases/download/${compose_v1_version}/docker-compose-Linux-x86_64" -o /usr/local/bin/docker-compose
+        chmod +x /usr/local/bin/docker-compose
+    fi
+fi
+
+# Install docker-compose switch if not already installed - https://github.com/docker/compose-switch#manual-installation
+current_v1_compose_path="$(which docker-compose)"
+target_v1_compose_path="$(dirname "${current_v1_compose_path}")/docker-compose-v1"
+if ! type compose-switch > /dev/null 2>&1; then
+    echo "(*) Installing compose-switch..."
+    compose_switch_version="latest"
+    find_version_from_git_tags compose_switch_version "https://github.com/docker/compose-switch"
+    curl -fsSL "https://github.com/docker/compose-switch/releases/download/v${compose_switch_version}/docker-compose-linux-${architecture}" -o /usr/local/bin/compose-switch
+    chmod +x /usr/local/bin/compose-switch
+    # TODO: Verify checksum once available: https://github.com/docker/compose-switch/issues/11
+
+    # Setup v1 CLI as alternative in addition to compose-switch (which maps to v2)
+    mv "${current_v1_compose_path}" "${target_v1_compose_path}"
+    update-alternatives --install /usr/local/bin/docker-compose docker-compose /usr/local/bin/compose-switch 99
+    update-alternatives --install /usr/local/bin/docker-compose docker-compose "${target_v1_compose_path}" 1
+fi
+if [ "${DOCKER_DASH_COMPOSE_VERSION}" = "v1" ]; then
+    update-alternatives --set docker-compose "${target_v1_compose_path}"
+else
+    update-alternatives --set docker-compose /usr/local/bin/compose-switch
+fi
+
+# If init file already exists, exit
+if [ -f "/usr/local/share/docker-init.sh" ]; then
+    exit 0
+fi
+echo "docker-init doesn't exist, adding..."
+
+# By default, make the source and target sockets the same
+if [ "${SOURCE_SOCKET}" != "${TARGET_SOCKET}" ]; then
+    touch "${SOURCE_SOCKET}"
+    ln -s "${SOURCE_SOCKET}" "${TARGET_SOCKET}"
+fi
+
+# Add a stub if not adding non-root user access, user is root
+if [ "${ENABLE_NONROOT_DOCKER}" = "false" ] || [ "${USERNAME}" = "root" ]; then
+    echo -e '#!/usr/bin/env bash\nexec "$@"' > /usr/local/share/docker-init.sh
+    chmod +x /usr/local/share/docker-init.sh
+    exit 0
+fi
+
+# Setup a docker group in the event the docker socket's group is not root
+if ! grep -qE '^docker:' /etc/group; then
+    groupadd --system docker
+fi
+usermod -aG docker "${USERNAME}"
+DOCKER_GID="$(grep -oP '^docker:x:\K[^:]+' /etc/group)"
+
+# If enabling non-root access and specified user is found, setup socat and add script
+chown -h "${USERNAME}":root "${TARGET_SOCKET}"
+if ! dpkg -s socat > /dev/null 2>&1; then
+    apt_get_update_if_needed
+    apt-get -y install socat
+fi
+tee /usr/local/share/docker-init.sh > /dev/null \
+<< EOF
+#!/usr/bin/env bash
+#-------------------------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
+#-------------------------------------------------------------------------------------------------------------
+
+set -e
+
+SOCAT_PATH_BASE=/tmp/vscr-docker-from-docker
+SOCAT_LOG=\${SOCAT_PATH_BASE}.log
+SOCAT_PID=\${SOCAT_PATH_BASE}.pid
+
+# Wrapper function to only use sudo if not already root
+sudoIf()
+{
+    if [ "\$(id -u)" -ne 0 ]; then
+        sudo "\$@"
+    else
+        "\$@"
+    fi
+}
+
+# Log messages
+log()
+{
+    echo -e "[\$(date)] \$@" | sudoIf tee -a \${SOCAT_LOG} > /dev/null
+}
+
+echo -e "\n** \$(date) **" | sudoIf tee -a \${SOCAT_LOG} > /dev/null
+log "Ensuring ${USERNAME} has access to ${SOURCE_SOCKET} via ${TARGET_SOCKET}"
+
+# If enabled, try to update the docker group with the right GID. If the group is root,
+# fall back on using socat to forward the docker socket to another unix socket so
+# that we can set permissions on it without affecting the host.
+if [ "${ENABLE_NONROOT_DOCKER}" = "true" ] && [ "${SOURCE_SOCKET}" != "${TARGET_SOCKET}" ] && [ "${USERNAME}" != "root" ] && [ "${USERNAME}" != "0" ]; then
+    SOCKET_GID=\$(stat -c '%g' ${SOURCE_SOCKET})
+    if [ "\${SOCKET_GID}" != "0" ] && [ "\${SOCKET_GID}" != "${DOCKER_GID}" ] && ! grep -E ".+:x:\${SOCKET_GID}" /etc/group; then
+        sudoIf groupmod --gid "\${SOCKET_GID}" docker
+    else
+        # Enable proxy if not already running
+        if [ ! -f "\${SOCAT_PID}" ] || ! ps -p \$(cat \${SOCAT_PID}) > /dev/null; then
+            log "Enabling socket proxy."
+            log "Proxying ${SOURCE_SOCKET} to ${TARGET_SOCKET} for vscode"
+            sudoIf rm -rf ${TARGET_SOCKET}
+            (sudoIf socat UNIX-LISTEN:${TARGET_SOCKET},fork,mode=660,user=${USERNAME} UNIX-CONNECT:${SOURCE_SOCKET} 2>&1 | sudoIf tee -a \${SOCAT_LOG} > /dev/null & echo "\$!" | sudoIf tee \${SOCAT_PID} > /dev/null)
+        else
+            log "Socket proxy already running."
+        fi
+    fi
+    log "Success"
+fi
+
+# Execute whatever commands were passed in (if any). This allows us
+# to set this script to ENTRYPOINT while still executing the default CMD.
+set +e
+exec "\$@"
+EOF
+chmod +x /usr/local/share/docker-init.sh
+chown ${USERNAME}:root /usr/local/share/docker-init.sh
+echo "Done!"
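The socket defaults here line up with .devcontainer/docker-compose.yml above, which mounts the host's /var/run/docker.sock at /var/run/docker-host.sock (this script's SOURCE_SOCKET). If the generated /usr/local/share/docker-init.sh runs as the entrypoint, it re-exposes the socket at /var/run/docker.sock with group access for the non-root user, so inside the container, for example:

    docker ps    # reaches the host daemon through the proxied socket

works without sudo. (This commit's devcontainer.json also enables the docker-outside-of-docker feature, which provides similar wiring.)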
.dockerignore
ADDED
@@ -0,0 +1,12 @@
+**/__pycache__
+.devcontainer
+sandbox
+Dockerfile
+Makefile
+post-devcontainer.sh
+pdf_downloads
+pdf_images
+output
+pdf_images
+input
+.github
.gitattributes
CHANGED
@@ -1,35 +1 @@
-*.7z filter=lfs diff=lfs merge=lfs -text
-*.arrow filter=lfs diff=lfs merge=lfs -text
-*.bin filter=lfs diff=lfs merge=lfs -text
-*.bz2 filter=lfs diff=lfs merge=lfs -text
-*.ckpt filter=lfs diff=lfs merge=lfs -text
-*.ftz filter=lfs diff=lfs merge=lfs -text
-*.gz filter=lfs diff=lfs merge=lfs -text
-*.h5 filter=lfs diff=lfs merge=lfs -text
-*.joblib filter=lfs diff=lfs merge=lfs -text
-*.lfs.* filter=lfs diff=lfs merge=lfs -text
-*.mlmodel filter=lfs diff=lfs merge=lfs -text
-*.model filter=lfs diff=lfs merge=lfs -text
-*.msgpack filter=lfs diff=lfs merge=lfs -text
-*.npy filter=lfs diff=lfs merge=lfs -text
-*.npz filter=lfs diff=lfs merge=lfs -text
-*.onnx filter=lfs diff=lfs merge=lfs -text
-*.ot filter=lfs diff=lfs merge=lfs -text
-*.parquet filter=lfs diff=lfs merge=lfs -text
-*.pb filter=lfs diff=lfs merge=lfs -text
-*.pickle filter=lfs diff=lfs merge=lfs -text
-*.pkl filter=lfs diff=lfs merge=lfs -text
 *.pt filter=lfs diff=lfs merge=lfs -text
-*.pth filter=lfs diff=lfs merge=lfs -text
-*.rar filter=lfs diff=lfs merge=lfs -text
-*.safetensors filter=lfs diff=lfs merge=lfs -text
-saved_model/**/* filter=lfs diff=lfs merge=lfs -text
-*.tar.* filter=lfs diff=lfs merge=lfs -text
-*.tar filter=lfs diff=lfs merge=lfs -text
-*.tflite filter=lfs diff=lfs merge=lfs -text
-*.tgz filter=lfs diff=lfs merge=lfs -text
-*.wasm filter=lfs diff=lfs merge=lfs -text
-*.xz filter=lfs diff=lfs merge=lfs -text
-*.zip filter=lfs diff=lfs merge=lfs -text
-*.zst filter=lfs diff=lfs merge=lfs -text
-*tfevents* filter=lfs diff=lfs merge=lfs -text
.gitignore
ADDED
@@ -0,0 +1,15 @@
+input
+output
+**/__pycache__
+sandbox/dataset
+sandbox/dataset*
+sandbox/images
+sandbox/runs/**
+sandbox/runs
+**/*.Identifier
+images
+yolo*.pt
+*.jpg
+**/.jpg
+pdf_downloads
+
.gitmodules
ADDED
File without changes
Dockerfile
ADDED
@@ -0,0 +1,48 @@
+FROM nvidia/cuda:12.3.1-devel-ubuntu22.04
+
+ARG GRADIO_SERVER_PORT=7860
+ENV GRADIO_SERVER_PORT=${GRADIO_SERVER_PORT}
+
+RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
+    && apt-get -y install --no-install-recommends \
+    vim \
+    git \
+    git-lfs \
+    zsh \
+    tmux \
+    wget \
+    python3 \
+    python3-dev \
+    python3-pip \
+    python3-setuptools \
+    libgl1-mesa-dev \
+    libglib2.0-0
+
+RUN git lfs install
+
+# Set up a new user named "user" with user ID 1000
+RUN useradd -m -u 1000 user
+
+# Switch to the "user" user
+USER user
+
+# Set home to the user's home directory
+ENV HOME=/home/user \
+    PATH=/home/user/.local/bin:$PATH
+
+# Set the working directory to the user's home directory
+WORKDIR $HOME/app
+
+# Try and run pip command after setting the user with `USER user` to avoid permission issues with Python
+RUN pip install --no-cache-dir --upgrade pip
+COPY --chown=user requirements.txt $HOME/app/
+RUN pip install -r $HOME/app/requirements.txt
+
+# Copy the current directory contents into the container at $HOME/app setting the owner to the user
+COPY --chown=user app.py README.md $HOME/app/
+COPY --chown=user app.py model/ $HOME/app/model/
+COPY --chown=user app.py solareyes/ $HOME/app/solareyes/
+
+EXPOSE 7860
+
+CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
Makefile
ADDED
@@ -0,0 +1,7 @@
+APP ?= solar-eyes
+
+build:
+	docker build -t $(APP) .
+
+run:
+	docker run -it --rm --gpus all -p 7860:7860 $(APP)
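A usage sketch, assuming Docker and the NVIDIA runtime are available on the host:

    make build
    make run                 # or override the image name: make APP=my-image build run

The container then serves the app on http://localhost:7860, matching the EXPOSE and uvicorn CMD in the Dockerfile above.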
README.md
CHANGED
@@ -1,12 +1,9 @@
 ---
-title: Solar
-emoji:
-colorFrom: purple
-colorTo: purple
-sdk: gradio
-sdk_version: 4.19.2
+title: Solar-Eyes-Dockerized
+emoji: ☀️
 app_file: app.py
+sdk: docker
+app_port: 7860
+colorFrom: gray
+colorTo: blue
-
 ---
-
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py
ADDED
@@ -0,0 +1,245 @@
+import asyncio
+import os
+import random
+
+from fastapi import FastAPI, UploadFile
+from fastapi.responses import FileResponse, HTMLResponse, RedirectResponse
+import gradio as gr
+from PIL import Image, ImageFilter
+import numpy as np
+import pypdfium2 as pdfium
+from ultralytics import YOLO
+from ultralytics.engine.results import Results, Masks
+import uvicorn
+import cv2
+import uuid
+from functools import partial
+
+# from solareyes.sam import SAM
+
+app = FastAPI()
+
+# Load the models
+model: YOLO = YOLO('model/autodistill_best.pt')  # Path to trained detection model
+seg_model: YOLO = YOLO('model/autodistill_best_seg.pt')  # Path to trained segmentation model
+
+# Directories
+image_dir = './pdf_images/'
+cropped_dir = './output/'
+pdf_dir = './pdf_downloads/'
+os.makedirs(image_dir, exist_ok=True)
+os.makedirs(cropped_dir, exist_ok=True)
+os.makedirs(pdf_dir, exist_ok=True)
+
+HTML = """
+<!DOCTYPE html>
+<html>
+<h1>Gradio Request Demo</h1>
+<p>Click the button to be redirected to the gradio app!</p>
+<button onclick="window.location.pathname='/gradio'">Redirect</button>
+</html>
+"""
+
+# sam = SAM()
+
+@app.get("/")
+def read_main():
+    return HTMLResponse(HTML)
+
+
+# @app.get("/foo")
+# def redirect():
+#     return RedirectResponse("/gradio")
+
+
+def detect_solar_panel(image) -> Results:
+    # Perform inference
+    results: Results = model(image)
+    return results
+
+
+def segment_solar_panel(image) -> Results:
+    # Perform inference
+    results: Results = seg_model.predict(image, imgsz=(841, 595), retina_masks=True)
+    return results
+
+
+def resize_and_pad(subject_image: Image.Image):
+    # Resize the subject image to 80% of 1200px while maintaining its aspect ratio
+    target_height = int(1200 * 0.8)
+    aspect_ratio = subject_image.width / subject_image.height
+    new_width = int(target_height * aspect_ratio)
+    resized_subject = subject_image.resize((new_width, target_height), Image.LANCZOS)
+
+    # Create a new transparent image
+    new_image = Image.new("RGBA", (1200, 1200), (0, 0, 0, 0))
+
+    # Calculate the position to paste the resized subject image
+    x = (1200 - new_width) // 2
+    y = (1200 - target_height) // 2
+
+    # Paste the resized subject image onto the transparent image
+    new_image.paste(resized_subject, (x, y), resized_subject)
+    png_image = new_image
+
+    # Flatten the PNG onto a white background for the JPEG variant
+    jpg_image = Image.new("RGB", (1200, 1200), (255, 255, 255))
+    jpg_image.paste(png_image, (0, 0), png_image)
+
+    # Return both variants
+    return png_image, jpg_image
+
+
+def segment_image_core(img: np.ndarray | Image.Image):
+    if type(img) is np.ndarray:
+        img = Image.fromarray(img)
+    results = segment_solar_panel(img)
+    sections = []
+    jpg_img = None  # stays None if no detection could be masked and cropped
+    for i, result in enumerate(results):
+        print(f"Result {i}")
+        result: Results
+        try:
+            h2, w2, c2 = result.orig_img.shape
+            # Deal with the boxes
+            for i, box in enumerate(result.boxes):
+                x1, y1, x2, y2 = box.xyxy[0].tolist()
+                sections.append(((int(x1), int(y1), int(x2), int(y2)), f"{section_labels[0]} Bounding Box - index {i} - conf {box.conf}"))
+                # Now the masks
+                masks: Masks = result.masks
+                try:
+                    mask = masks[i]
+                    cpu_mask = mask.cpu()
+                    squeezed_mask = cpu_mask.data.numpy()
+                    transposed_mask = squeezed_mask.transpose(1, 2, 0)
+                    # Morphological opening with an elliptical kernel to clean up the mask
+                    kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (11, 11))
+                    opened_mask = cv2.morphologyEx(transposed_mask, cv2.MORPH_OPEN, kernel, iterations=3)
+                    cv_mask = cv2.resize(opened_mask, (w2, h2))
+                    image_mask = Image.fromarray((cv_mask * 255).astype(np.uint8)).filter(ImageFilter.GaussianBlur(1))
+                    img_out = img.copy()
+                    img_out.putalpha(image_mask)
+                    img_out = img_out.crop((x1, y1, x2, y2))
+                    png_img, jpg_img = resize_and_pad(img_out)
+                    sections.append((cv_mask, f"{section_labels[0]} Mask - Index: {i}"))
+                except TypeError as e:
+                    print(f"Error processing image: {e}, probably no masks.")
+        except IndexError as e:
+            print(f"Error processing image: {e}, probably no boxes.")
+    return (img, sections), jpg_img
+
+
+def process_pdf_core(pdf) -> Image.Image:
+    pdf = pdfium.PdfDocument(pdf)
+
+    # Render just the first page
+    page = pdf[0]
+    image = page.render(scale=4).to_pil()
+    return image
+
+
+with gr.Blocks() as demo:
+    section_labels = ['Solar Panel']
+
+    def segment_image(img):
+        img_sections, jpg_img = segment_image_core(img)
+        return img_sections
+
+    # def process_image(img):
+    #     results = detect_solar_panel(img)
+    #     sections = []
+    #     for result in results:
+    #         result: Results
+    #         # print(result)
+    #         try:
+    #             boxes = result.boxes.xyxy[0].tolist()
+    #             # Unpack boxes
+    #             x1, y1, x2, y2 = boxes
+    #             sections.append(((int(x1), int(y1), int(x2), int(y2)), f"{section_labels[0]} Bounding Box"))
+    #             # Create 4 centroids around the true centroid, shifted by a delta value
+    #             delta = 0.3
+    #             delta_x = (x2 - x1) * delta
+    #             delta_y = (y2 - y1) * delta
+    #             x_centroid = (x1 + x2) / 2
+    #             y_centroid = (y1 + y2) / 2
+    #             xtop_centroid = x_centroid
+    #             ytop_centroid = y_centroid + delta_y
+    #             xright_centroid = x_centroid + delta_x
+    #             yright_centroid = y_centroid
+    #             xbottom_centroid = x_centroid
+    #             ybottom_centroid = y_centroid - delta_y
+    #             xleft_centroid = x_centroid - delta_x
+    #             yleft_centroid = y_centroid
+    #             sam_mask, sam_scores = sam.segment(img, [[
+    #                 [xtop_centroid, ytop_centroid],
+    #                 [xright_centroid, yright_centroid],
+    #                 [xbottom_centroid, ybottom_centroid],
+    #                 [xleft_centroid, yleft_centroid]
+    #             ]])
+    #             squeezed_sam_mask_tensor = sam_mask[0].squeeze()
+    #             squeezed_sam_scores_tensor = sam_scores[0].squeeze()
+    #             print(f"squeezed sam mask shape {squeezed_sam_mask_tensor.shape}")
+    #             print(f"squeezed sam scores shape {squeezed_sam_scores_tensor.shape}")
+    #             for i in range(0, squeezed_sam_mask_tensor.shape[0]):
+    #                 flat_mask = squeezed_sam_mask_tensor[i].numpy()
+    #                 sections.append((flat_mask, f"{section_labels[0]} Mask {i} - Score: {squeezed_sam_scores_tensor[i]}"))
+    #                 i += 1
+    #         except IndexError as e:
+    #             print(f"Error processing image: {e}, probably no boxes.")
+    #     return (img, sections)
+
+    def process_pdf(pdf):
+        image = process_pdf_core(pdf)
+        return segment_image(image)
+
+    with gr.Row():
+        img_input = gr.Image(label="Upload Image", height=400)
+        img_output = gr.AnnotatedImage(height=400)
+
+    section_btn = gr.Button("Identify Solar Panel From Image")
+
+    # Offer every file in the image directory as an example
+    # (the random-sample variant is kept below for reference)
+    gr.Examples(
+        inputs = img_input,
+        # examples = [os.path.join(image_dir, file) for file in random.sample(os.listdir(image_dir), 15)]
+        examples = [os.path.join(image_dir, file) for file in os.listdir(image_dir)],
+    )
+
+    with gr.Row():
+        pdf_input = gr.File(label="Upload PDF", file_types=['pdf'], height=200)
+
+    pdf_btn = gr.Button("Identify Solar Panel from PDF")
+
+    gr.Examples(
+        inputs = pdf_input,
+        examples = [os.path.join(pdf_dir, file) for file in os.listdir(pdf_dir)],
+    )
+
+    section_btn.click(segment_image, [img_input], img_output)
+    pdf_btn.click(process_pdf, [pdf_input], img_output)
+
+# Accept a PDF file, return a JPEG image
+@app.post("/uploadPdf", response_class=FileResponse)
+def extract_image(uploadFile: UploadFile) -> FileResponse:
+    file = uploadFile.file.read()
+    image = process_pdf_core(file)
+    img_segments, jpeg_image = segment_image_core(image)
+    id = str(uuid.uuid4())
+    filename = f"{cropped_dir}/cropped_{id}.jpg"
+    jpeg_image.save(filename)
+    return FileResponse(filename)
+
+app = gr.mount_gradio_app(app, demo, path="/gradio")
+
+if __name__ == "__main__":
+    # app = gr.mount_gradio_app(app, demo, path="/gradio")
+    uvicorn.run(app, port=7860)
+    # demo.launch(share=True)
+    # demo.launch(share=True, auth=(os.environ.get("GRADIO_USERNAME"), os.environ.get("GRADIO_PASSWORD")))
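For reference, a minimal client sketch for the `/uploadPdf` endpoint defined above, assuming the container is reachable on localhost:7860; `example.pdf` and `cropped.jpg` are placeholder file names:

import requests

# Post a PDF to the running Space and save the returned 1200x1200 JPEG crop.
# The multipart field name must match the endpoint's parameter, "uploadFile".
with open("example.pdf", "rb") as f:
    resp = requests.post(
        "http://localhost:7860/uploadPdf",
        files={"uploadFile": ("example.pdf", f, "application/pdf")},
    )
resp.raise_for_status()
with open("cropped.jpg", "wb") as out:
    out.write(resp.content)
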
download_pdfs.py
ADDED
@@ -0,0 +1,33 @@
+import requests
+from bs4 import BeautifulSoup
+import os
+
+# URL setup
+base_url = "http://74.208.61.158:8888"
+page_url = base_url + "/list-pdfs"  # if the PDF links are on the home page
+
+# Directory for storing PDFs
+pdf_dir = "pdf_downloads"
+os.makedirs(pdf_dir, exist_ok=True)
+
+# Fetch the webpage
+response = requests.get(page_url)
+response.raise_for_status()  # will raise an exception for HTTP error codes
+
+# Parse the webpage
+soup = BeautifulSoup(response.content, 'html.parser')
+
+# Find all PDF links
+for link in soup.find_all('a', href=True):
+    href = link['href']
+    if href.endswith('.pdf'):
+        pdf_url = base_url + href
+        pdf_response = requests.get(pdf_url)
+        pdf_response.raise_for_status()
+
+        # Write the PDF to a file
+        pdf_filename = os.path.join(pdf_dir, href.split('/')[-1])
+        with open(pdf_filename, 'wb') as file:
+            file.write(pdf_response.content)
+
+        print(f"Downloaded: {pdf_filename}")
model/autodistill_best.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:961a8daaa02bf1a0976496b9ebfc60a14017ad5f1c5f21eea9f66646cb981759
+size 6233305
model/autodistill_best_seg.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cf418dd347f39d8f707e7f00c5f765c0a46309ae06e0e23770593c1736d90ebb
+size 23865763
model/solar_best.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fd33f07c2793738635f23d093bc7a3fe46444b36716f2445c2fa0359b871b918
+size 6245678
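These three LFS pointers ship the trained YOLO weights with the Space. A quick local sanity-check sketch, assuming `git lfs pull` has materialized the real weights and using a placeholder image path:

from ultralytics import YOLO

# Load the bundled segmentation checkpoint and run one prediction,
# mirroring the settings app.py uses for PDF page renders.
model = YOLO("model/autodistill_best_seg.pt")
results = model.predict("pdf_images/example.jpg", imgsz=(841, 595), retina_masks=True)
print(results[0].boxes)
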
post-devcontainer.sh
ADDED
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+python3 -m pip install -r requirements.txt
render_pdfs.py
ADDED
@@ -0,0 +1,32 @@
+import pypdfium2 as pdfium
+import os
+
+# Directory containing PDFs
+pdf_dir = './pdf_downloads'
+# Directory to store images
+image_dir = './pdf_images'
+os.makedirs(image_dir, exist_ok=True)
+
+# Process each PDF
+for pdf_file in os.listdir(pdf_dir):
+    if pdf_file.endswith('.pdf'):
+        pdf_path = os.path.join(pdf_dir, pdf_file)
+        image_path = os.path.join(image_dir, pdf_file.replace('.pdf', '.jpg'))
+
+        # Open the PDF
+        print(f"Attempting to convert {pdf_path}")
+        try:
+            pdf = pdfium.PdfDocument(pdf_path)
+        except Exception as e:
+            print(f"Unable to convert {pdf_path} due to {e}")
+            continue  # skip files that fail to open
+
+        # Get just the first page
+        page = pdf[0]
+        image = page.render(scale=4).to_pil()
+
+        # Save the rendered page as a JPEG image
+        image.save(image_path, 'JPEG')
+
+        print(f"Converted {pdf_path} to {image_path}")
requirements.txt
ADDED
@@ -0,0 +1,16 @@
+ultralytics
+pillow
+gradio
+fastapi[all]
+gunicorn
+typer[all]
+pypdfium2
+torch
+torchvision
+torchaudio
+transformers
+pillow
+ipykernel
+ultralytics
+requests
+bs4
sandbox/autodistill-yolo.ipynb
ADDED
The diff for this file is too large to render.
sandbox/how-to-auto-train-yolov8-model-with-autodistill copy.ipynb
ADDED
The diff for this file is too large to render.
sandbox/mask_hell.ipynb
ADDED
The diff for this file is too large to render.
sandbox/sandbox.ipynb
ADDED
@@ -0,0 +1,181 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/home/vscode/.local/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
+      "  from .autonotebook import tqdm as notebook_tqdm\n"
+     ]
+    }
+   ],
+   "source": [
+    "from solareyes.sam import SAM\n",
+    "\n",
+    "sam = SAM()\n",
+    "\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import requests\n",
+    "from PIL import Image\n",
+    "\n",
+    "img_url = \"https://huggingface.co/ybelkada/segment-anything/resolve/main/assets/car.png\"\n",
+    "raw_image = Image.open(requests.get(img_url, stream=True).raw).convert(\"RGB\")\n",
+    "input_points = [[[450, 600]]]  # 2D location of a window in the image"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "\n",
+    "masks, scores = sam.segment(raw_image, input_points)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "False"
+      ]
+     },
+     "execution_count": 8,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "flat_first_mask = masks[0][0][0]\n",
+    "ffmnp = flat_first_mask.numpy()\n",
+    "ffmnp.shape\n",
+    "ffmnp[0][0]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [
+    {
+     "ename": "NameError",
+     "evalue": "name 'masks' is not defined",
+     "output_type": "error",
+     "traceback": [
+      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
+      "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
+      "\u001b[1;32m/workspaces/solar_eyes/sandbox.ipynb Cell 6\u001b[0m line \u001b[0;36m1\n\u001b[0;32m----> <a href='vscode-notebook-cell://dev-container%2B7b22686f737450617468223a225c5c5c5c77736c2e6c6f63616c686f73745c5c5562756e74755c5c686f6d655c5c617a765c5c576f726b73706163655c5c313030302d636f6e7461696e6572735c5c736f6c61722d65796573222c226c6f63616c446f636b6572223a66616c73652c22636f6e66696746696c65223a7b22246d6964223a312c2270617468223a222f686f6d652f617a762f576f726b73706163652f313030302d636f6e7461696e6572732f736f6c61722d657965732f2e646576636f6e7461696e65722f646576636f6e7461696e65722e6a736f6e222c22736368656d65223a227673636f64652d66696c65486f7374227d7d/workspaces/solar_eyes/sandbox.ipynb#X12sdnNjb2RlLXJlbW90ZQ%3D%3D?line=0'>1</a>\u001b[0m squeezed \u001b[39m=\u001b[39m masks[\u001b[39m0\u001b[39m]\u001b[39m.\u001b[39msqueeze()\n\u001b[1;32m <a href='vscode-notebook-cell://dev-container%2B7b22686f737450617468223a225c5c5c5c77736c2e6c6f63616c686f73745c5c5562756e74755c5c686f6d655c5c617a765c5c576f726b73706163655c5c313030302d636f6e7461696e6572735c5c736f6c61722d65796573222c226c6f63616c446f636b6572223a66616c73652c22636f6e66696746696c65223a7b22246d6964223a312c2270617468223a222f686f6d652f617a762f576f726b73706163652f313030302d636f6e7461696e6572732f736f6c61722d657965732f2e646576636f6e7461696e65722f646576636f6e7461696e65722e6a736f6e222c22736368656d65223a227673636f64652d66696c65486f7374227d7d/workspaces/solar_eyes/sandbox.ipynb#X12sdnNjb2RlLXJlbW90ZQ%3D%3D?line=1'>2</a>\u001b[0m squeezed[\u001b[39m0\u001b[39m][\u001b[39m500\u001b[39m][\u001b[39m600\u001b[39m]\n",
+      "\u001b[0;31mNameError\u001b[0m: name 'masks' is not defined"
+     ]
+    }
+   ],
+   "source": [
+    "squeezed = masks[0].squeeze()\n",
+    "squeezed[0][500][600]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 18,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import numpy as np\n",
+    "def flatten_mask(mask, random_color=False):\n",
+    "    if random_color:\n",
+    "        color = np.concatenate([np.random.random(3), np.array([0.6])], axis=0)\n",
+    "    else:\n",
+    "        color = np.array([30/255, 144/255, 255/255, 0.6])\n",
+    "    h, w = mask.shape[-2:]\n",
+    "    return mask.reshape(h, w, 1) * color.reshape(1, 1, -1)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 22,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "1764\n",
+      "2646\n"
+     ]
+    }
+   ],
+   "source": [
+    "squeezed = masks[0].squeeze()\n",
+    "h, w = squeezed.shape[-2:]\n",
+    "print(h)\n",
+    "print(w)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 27,
+   "metadata": {},
+   "outputs": [
+    {
+     "ename": "RuntimeError",
+     "evalue": "shape '[1764, 2646, 1]' is invalid for input of size 14002632",
+     "output_type": "error",
+     "traceback": [
+      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
+      "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)",
+      "\u001b[1;32m/workspaces/solar_eyes/sandbox.ipynb Cell 9\u001b[0m line \u001b[0;36m1\n\u001b[0;32m----> <a href='vscode-notebook-cell://dev-container%2B7b22686f737450617468223a225c5c5c5c77736c2e6c6f63616c686f73745c5c5562756e74755c5c686f6d655c5c617a765c5c576f726b73706163655c5c313030302d636f6e7461696e6572735c5c736f6c61722d65796573222c226c6f63616c446f636b6572223a66616c73652c22636f6e66696746696c65223a7b22246d6964223a312c2270617468223a222f686f6d652f617a762f576f726b73706163652f313030302d636f6e7461696e6572732f736f6c61722d657965732f2e646576636f6e7461696e65722f646576636f6e7461696e65722e6a736f6e222c22736368656d65223a227673636f64652d66696c65486f7374227d7d/workspaces/solar_eyes/sandbox.ipynb#W6sdnNjb2RlLXJlbW90ZQ%3D%3D?line=0'>1</a>\u001b[0m flatten_mask(masks[\u001b[39m0\u001b[39;49m][\u001b[39m0\u001b[39;49m]\u001b[39m.\u001b[39;49msqueeze())\n",
+      "\u001b[1;32m/workspaces/solar_eyes/sandbox.ipynb Cell 9\u001b[0m line \u001b[0;36m8\n\u001b[1;32m <a href='vscode-notebook-cell://dev-container%2B7b22686f737450617468223a225c5c5c5c77736c2e6c6f63616c686f73745c5c5562756e74755c5c686f6d655c5c617a765c5c576f726b73706163655c5c313030302d636f6e7461696e6572735c5c736f6c61722d65796573222c226c6f63616c446f636b6572223a66616c73652c22636f6e66696746696c65223a7b22246d6964223a312c2270617468223a222f686f6d652f617a762f576f726b73706163652f313030302d636f6e7461696e6572732f736f6c61722d657965732f2e646576636f6e7461696e65722f646576636f6e7461696e65722e6a736f6e222c22736368656d65223a227673636f64652d66696c65486f7374227d7d/workspaces/solar_eyes/sandbox.ipynb#W6sdnNjb2RlLXJlbW90ZQ%3D%3D?line=5'>6</a>\u001b[0m color \u001b[39m=\u001b[39m np\u001b[39m.\u001b[39marray([\u001b[39m30\u001b[39m\u001b[39m/\u001b[39m\u001b[39m255\u001b[39m, \u001b[39m144\u001b[39m\u001b[39m/\u001b[39m\u001b[39m255\u001b[39m, \u001b[39m255\u001b[39m\u001b[39m/\u001b[39m\u001b[39m255\u001b[39m, \u001b[39m0.6\u001b[39m])\n\u001b[1;32m <a href='vscode-notebook-cell://dev-container%2B7b22686f737450617468223a225c5c5c5c77736c2e6c6f63616c686f73745c5c5562756e74755c5c686f6d655c5c617a765c5c576f726b73706163655c5c313030302d636f6e7461696e6572735c5c736f6c61722d65796573222c226c6f63616c446f636b6572223a66616c73652c22636f6e66696746696c65223a7b22246d6964223a312c2270617468223a222f686f6d652f617a762f576f726b73706163652f313030302d636f6e7461696e6572732f736f6c61722d657965732f2e646576636f6e7461696e65722f646576636f6e7461696e65722e6a736f6e222c22736368656d65223a227673636f64652d66696c65486f7374227d7d/workspaces/solar_eyes/sandbox.ipynb#W6sdnNjb2RlLXJlbW90ZQ%3D%3D?line=6'>7</a>\u001b[0m h, w \u001b[39m=\u001b[39m mask\u001b[39m.\u001b[39mshape[\u001b[39m-\u001b[39m\u001b[39m2\u001b[39m:]\n\u001b[0;32m----> <a href='vscode-notebook-cell://dev-container%2B7b22686f737450617468223a225c5c5c5c77736c2e6c6f63616c686f73745c5c5562756e74755c5c686f6d655c5c617a765c5c576f726b73706163655c5c313030302d636f6e7461696e6572735c5c736f6c61722d65796573222c226c6f63616c446f636b6572223a66616c73652c22636f6e66696746696c65223a7b22246d6964223a312c2270617468223a222f686f6d652f617a762f576f726b73706163652f313030302d636f6e7461696e6572732f736f6c61722d657965732f2e646576636f6e7461696e65722f646576636f6e7461696e65722e6a736f6e222c22736368656d65223a227673636f64652d66696c65486f7374227d7d/workspaces/solar_eyes/sandbox.ipynb#W6sdnNjb2RlLXJlbW90ZQ%3D%3D?line=7'>8</a>\u001b[0m \u001b[39mreturn\u001b[39;00m mask\u001b[39m.\u001b[39;49mreshape(h, w, \u001b[39m1\u001b[39;49m) \u001b[39m*\u001b[39m color\u001b[39m.\u001b[39mreshape(\u001b[39m1\u001b[39m, \u001b[39m1\u001b[39m, \u001b[39m-\u001b[39m\u001b[39m1\u001b[39m)\n",
+      "\u001b[0;31mRuntimeError\u001b[0m: shape '[1764, 2646, 1]' is invalid for input of size 14002632"
+     ]
+    }
+   ],
+   "source": [
+    "flatten_mask(masks[0][0].squeeze())"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.10.12"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
solareyes/__init__.py
ADDED
File without changes
solareyes/sam.py
ADDED
@@ -0,0 +1,21 @@
+import torch
+from transformers import SamModel, SamProcessor
+
+class SAM():
+    def __init__(self):
+        self.device = "cuda" if torch.cuda.is_available() else "cpu"
+        self.model = SamModel.from_pretrained("facebook/sam-vit-large").to(self.device)
+        self.processor = SamProcessor.from_pretrained("facebook/sam-vit-large")
+
+    def segment(self, raw_image, input_points):
+        inputs = self.processor(raw_image, input_points=input_points, return_tensors="pt").to(self.device)
+        with torch.no_grad():
+            outputs = self.model(**inputs)
+
+        masks = self.processor.image_processor.post_process_masks(
+            outputs.pred_masks.cpu(), inputs["original_sizes"].cpu(), inputs["reshaped_input_sizes"].cpu()
+        )
+        scores = outputs.iou_scores
+
+        return masks, scores
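A minimal usage sketch for this wrapper, mirroring the prompt-point pattern in sandbox/sandbox.ipynb; the image path and point coordinates are placeholders:

from PIL import Image
from solareyes.sam import SAM

sam = SAM()

# Prompt SAM with a single 2D point that should land on the object
# to segment; masks is a list of tensors, scores holds IoU estimates.
raw_image = Image.open("pdf_images/example.jpg").convert("RGB")
masks, scores = sam.segment(raw_image, input_points=[[[450, 600]]])
print(masks[0].shape, scores.shape)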