Initial code import
- .gitattributes +1 -0
- .gitignore +2 -0
- LICENSE +22 -0
- README.md +225 -0
- cliponnx/__init__.py +0 -0
- cliponnx/bpe_simple_vocab_16e6.txt.gz +3 -0
- cliponnx/models.py +159 -0
- cliponnx/simple_tokenizer.py +134 -0
- convert.py +104 -0
- example.py +66 -0
- flowers.jpg +3 -0
- heavy-industry.jpg +3 -0
- poetry.lock +841 -0
- poetry.toml +2 -0
- pyproject.toml +32 -0
- variants.py +108 -0
.gitattributes
CHANGED
@@ -30,3 +30,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+*.jpg filter=lfs diff=lfs merge=lfs -text
.gitignore
ADDED
@@ -0,0 +1,2 @@
__pycache__
.venv
LICENSE
ADDED
@@ -0,0 +1,22 @@
MIT License

Copyright (c) 2022 Miha Lunar (other scripts)
Copyright (c) 2021 OpenAI (models, simple_tokenizer.py, bpe_simple_vocab_16e6.txt.gz, parts of models.py)

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
README.md
ADDED
@@ -0,0 +1,225 @@
---
language:
- en
license: mit
tags:
- clip
- vision
---

# CLIP Variants

_The CLIP model was developed by researchers at OpenAI to learn about what contributes to robustness in computer vision tasks. The model was also developed to test the ability of models to generalize to arbitrary image classification tasks in a zero-shot manner. It was not developed for general model deployment - to deploy models like CLIP, researchers will first need to carefully study their capabilities in relation to the specific context they’re being deployed within._

See the original [CLIP Model Card][clip-model-card] for more details on limitations and biases.

This repository holds [OpenAI's CLIP][clip] models converted into many other variants; see below for more details.

## Disclaimer & License

I haven't done many tests on these conversions. I've briefly tried the float16 versions, which seem very similar to the original float32; the similarity seems to drop more with the qint8/quint8 versions, as expected. I couldn't try qint8, as some operations seemed unsupported, but I'm including it for completeness. From a brief test, the quint8 version seemed to work fine.

The license for the conversion code is MIT; the license for the models is the same as the original license for the OpenAI models (🤷♂️). I have no affiliation with OpenAI.
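
If you want to sanity-check a conversion yourself, here is a minimal sketch of the kind of comparison described above. It assumes the float32 and float16 visual models from the Variants table below have been downloaded into `models/`, and it uses the small wrapper classes shipped in this repo:

```
import numpy as np

from cliponnx.models import VisualModel

providers = ["CPUExecutionProvider"]

# Original float32 export and the float16 variant (paths from the Variants table).
v32 = VisualModel("models/clip-vit-base-patch32-visual.onnx", providers=providers)
v16 = VisualModel("models/clip-vit-base-patch32-visual-float16.onnx", providers=providers)

images = v32.preprocess_images(["flowers.jpg"])
e32 = v32.encode(images)[0].astype(np.float32)
e16 = v16.encode(images)[0].astype(np.float32)

# A cosine similarity close to 1.0 means the variant tracks the original closely.
cos = np.dot(e32, e16) / (np.linalg.norm(e32) * np.linalg.norm(e16))
print(f"float32 vs float16 cosine similarity: {cos:.4f}")
```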

## Acknowledgements
* [OpenAI CLIP][clip]
* [OpenAI CLIP JavaScript by josephrocca](https://github.com/josephrocca/openai-clip-js)
* [CLIP-ONNX by Lednik7](https://github.com/Lednik7/CLIP-ONNX)
* [Exporting a Model from PyTorch to ONNX and Running it using ONNX Runtime](https://pytorch.org/tutorials/advanced/super_resolution_with_onnxruntime.html)
* [imgbeddings by minimaxir](https://github.com/minimaxir/imgbeddings)
* ... probably more

## Example

See [example.py](./example.py)

```
❯ python .\example.py
Loading visual model: models/clip-vit-base-patch32-visual-float16.onnx
Visual inference ready, input size 224, type tensor(float16)
Images shape: (2, 3, 224, 224)
Embeddings shape: (2, 512)

Loading textual model: models/clip-vit-base-patch32-textual-float16.onnx
Textual inference ready, input size 77, type tensor(int32)
Texts shape: (14, 77)
Embeddings shape: (14, 512)

flowers.jpg
-------- -------- ---------------------------------------------------------------
0.294922 >>>>>>>> a close up photo of a cherry blossom
0.267578 >>>>>>>> cherry blossom
0.249878 >>>>>>> flowers
0.242554 >>>>>>> a photo taken on a bright and sunny day
0.228882 >>>>>> bees
0.222778 >>>>>> plant
0.216187 >>>>>> a photo taken on a dark and cloudy day
0.201538 >>>>>> ruhrgebiet
0.196655 >>>>> processing plant
0.192139 >>>>> a photo taken at midnight
0.18689 >>>>> industry
0.177856 >>>>> cars
0.176636 >>>>> dogs and cats
0.111267 >>> a large industrial plant with many pipes, walkways and railings
-------- -------- ---------------------------------------------------------------

heavy-industry.jpg
-------- ---------- ---------------------------------------------------------------
0.336182 >>>>>>>>>> a large industrial plant with many pipes, walkways and railings
0.316895 >>>>>>>>> processing plant
0.302002 >>>>>>>>> industry
0.27417 >>>>>>>> ruhrgebiet
0.254883 >>>>>>> plant
0.22876 >>>>>> a photo taken on a dark and cloudy day
0.219482 >>>>>> a photo taken on a bright and sunny day
0.211304 >>>>>> a photo taken at midnight
0.198608 >>>>> cars
0.190552 >>>>> flowers
0.181885 >>>>> bees
0.180542 >>>>> cherry blossom
0.174438 >>>>> dogs and cats
0.14917 >>>> a close up photo of a cherry blossom
-------- ---------- ---------------------------------------------------------------
```

## Parameters

The only format supported right now is [Open Neural Network Exchange (ONNX)][onnx].

All the currently available OpenAI models have been converted. Some of the IDs were taken from the [OpenAI models on Hugging Face](https://huggingface.co/openai); others were made up following the same format.

| Model name | Model ID |
| --- | --- |
| RN50 | resnet-50 |
| RN101 | resnet-101 |
| RN50x4 | resnet-50x4 |
| RN50x16 | resnet-50x16 |
| RN50x64 | resnet-50x64 |
| ViT-B/16 | vit-base-patch16 |
| ViT-B/32 | vit-base-patch32 |
| ViT-L/14 | vit-large-patch14 |
| ViT-L/14@336px | vit-large-patch14-336 |

As CLIP is a multimodal model, the original models are split into two separate "modes", one for processing images and the other for processing text.

| Mode |
|---------|
| visual |
| textual |

The models were converted into multiple data types as well.

| Data Type |
|-------------|
| float16 |
| qint8 |
| quint8 |

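Putting these pieces together, each file in the Variants table below is named `models/clip-{model id}-{mode}.onnx` for the original float32 export and `models/clip-{model id}-{mode}-{data type}.onnx` for the converted data types. A minimal sketch of loading one variant directly with onnxruntime (the `variant_path` helper is just for illustration; it is not part of this repo):

```
import onnxruntime

def variant_path(model_id, mode, data_type=""):
    # Illustrative helper only: float32 originals have no data type suffix.
    suffix = f"-{data_type}" if data_type else ""
    return f"models/clip-{model_id}-{mode}{suffix}.onnx"

path = variant_path("vit-base-patch32", "visual", "float16")
sess = onnxruntime.InferenceSession(path, providers=["CPUExecutionProvider"])
inp = sess.get_inputs()[0]
print(inp.name, inp.shape, inp.type)  # e.g. input [batch_size, 3, 224, 224] tensor(float16)
```
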
## Variants

| Path | Model ID | Mode | Data Type | Available | Size (MB) |
|--------------------------------------------------------|-----------------------|---------|--------------------|-------------|-------------|
| models/clip-resnet-50-visual.onnx | resnet-50 | visual | float32 (original) | ✅ | 153 |
| models/clip-resnet-50-visual-float16.onnx | resnet-50 | visual | float16 | ✅ | 77 |
| models/clip-resnet-50-visual-qint8.onnx | resnet-50 | visual | qint8 | ✅ | 39 |
| models/clip-resnet-50-visual-quint8.onnx | resnet-50 | visual | quint8 | ✅ | 39 |
| models/clip-resnet-50-textual.onnx | resnet-50 | textual | float32 (original) | ✅ | 255 |
| models/clip-resnet-50-textual-float16.onnx | resnet-50 | textual | float16 | ✅ | 128 |
| models/clip-resnet-50-textual-qint8.onnx | resnet-50 | textual | qint8 | ✅ | 64 |
| models/clip-resnet-50-textual-quint8.onnx | resnet-50 | textual | quint8 | ✅ | 64 |
| models/clip-resnet-101-visual.onnx | resnet-101 | visual | float32 (original) | ✅ | 225 |
| models/clip-resnet-101-visual-float16.onnx | resnet-101 | visual | float16 | ✅ | 112 |
| models/clip-resnet-101-visual-qint8.onnx | resnet-101 | visual | qint8 | ✅ | 57 |
| models/clip-resnet-101-visual-quint8.onnx | resnet-101 | visual | quint8 | ✅ | 57 |
| models/clip-resnet-101-textual.onnx | resnet-101 | textual | float32 (original) | ✅ | 254 |
| models/clip-resnet-101-textual-float16.onnx | resnet-101 | textual | float16 | ✅ | 127 |
| models/clip-resnet-101-textual-qint8.onnx | resnet-101 | textual | qint8 | ✅ | 64 |
| models/clip-resnet-101-textual-quint8.onnx | resnet-101 | textual | quint8 | ✅ | 64 |
| models/clip-resnet-50x4-visual.onnx | resnet-50x4 | visual | float32 (original) | ✅ | 348 |
| models/clip-resnet-50x4-visual-float16.onnx | resnet-50x4 | visual | float16 | ✅ | 174 |
| models/clip-resnet-50x4-visual-qint8.onnx | resnet-50x4 | visual | qint8 | ✅ | 88 |
| models/clip-resnet-50x4-visual-quint8.onnx | resnet-50x4 | visual | quint8 | ✅ | 88 |
| models/clip-resnet-50x4-textual.onnx | resnet-50x4 | textual | float32 (original) | ✅ | 365 |
| models/clip-resnet-50x4-textual-float16.onnx | resnet-50x4 | textual | float16 | ✅ | 183 |
| models/clip-resnet-50x4-textual-qint8.onnx | resnet-50x4 | textual | qint8 | ✅ | 92 |
| models/clip-resnet-50x4-textual-quint8.onnx | resnet-50x4 | textual | quint8 | ✅ | 92 |
| models/clip-resnet-50x16-visual.onnx | resnet-50x16 | visual | float32 (original) | ✅ | 669 |
| models/clip-resnet-50x16-visual-float16.onnx | resnet-50x16 | visual | float16 | ✅ | 335 |
| models/clip-resnet-50x16-visual-qint8.onnx | resnet-50x16 | visual | qint8 | ✅ | 169 |
| models/clip-resnet-50x16-visual-quint8.onnx | resnet-50x16 | visual | quint8 | ✅ | 169 |
| models/clip-resnet-50x16-textual.onnx | resnet-50x16 | textual | float32 (original) | ✅ | 495 |
| models/clip-resnet-50x16-textual-float16.onnx | resnet-50x16 | textual | float16 | ✅ | 248 |
| models/clip-resnet-50x16-textual-qint8.onnx | resnet-50x16 | textual | qint8 | ✅ | 124 |
| models/clip-resnet-50x16-textual-quint8.onnx | resnet-50x16 | textual | quint8 | ✅ | 124 |
| models/clip-resnet-50x64-visual.onnx | resnet-50x64 | visual | float32 (original) | ✅ | 1681 |
| models/clip-resnet-50x64-visual-float16.onnx | resnet-50x64 | visual | float16 | ✅ | 840 |
| models/clip-resnet-50x64-visual-qint8.onnx | resnet-50x64 | visual | qint8 | ✅ | 424 |
| models/clip-resnet-50x64-visual-quint8.onnx | resnet-50x64 | visual | quint8 | ✅ | 424 |
| models/clip-resnet-50x64-textual.onnx | resnet-50x64 | textual | float32 (original) | ✅ | 812 |
| models/clip-resnet-50x64-textual-float16.onnx | resnet-50x64 | textual | float16 | ✅ | 406 |
| models/clip-resnet-50x64-textual-qint8.onnx | resnet-50x64 | textual | qint8 | ✅ | 204 |
| models/clip-resnet-50x64-textual-quint8.onnx | resnet-50x64 | textual | quint8 | ✅ | 204 |
| models/clip-vit-base-patch16-visual.onnx | vit-base-patch16 | visual | float32 (original) | ✅ | 345 |
| models/clip-vit-base-patch16-visual-float16.onnx | vit-base-patch16 | visual | float16 | ✅ | 173 |
| models/clip-vit-base-patch16-visual-qint8.onnx | vit-base-patch16 | visual | qint8 | ✅ | 87 |
| models/clip-vit-base-patch16-visual-quint8.onnx | vit-base-patch16 | visual | quint8 | ✅ | 87 |
| models/clip-vit-base-patch16-textual.onnx | vit-base-patch16 | textual | float32 (original) | ✅ | 254 |
| models/clip-vit-base-patch16-textual-float16.onnx | vit-base-patch16 | textual | float16 | ✅ | 127 |
| models/clip-vit-base-patch16-textual-qint8.onnx | vit-base-patch16 | textual | qint8 | ✅ | 64 |
| models/clip-vit-base-patch16-textual-quint8.onnx | vit-base-patch16 | textual | quint8 | ✅ | 64 |
| models/clip-vit-base-patch32-visual.onnx | vit-base-patch32 | visual | float32 (original) | ✅ | 352 |
| models/clip-vit-base-patch32-visual-float16.onnx | vit-base-patch32 | visual | float16 | ✅ | 176 |
| models/clip-vit-base-patch32-visual-qint8.onnx | vit-base-patch32 | visual | qint8 | ✅ | 89 |
| models/clip-vit-base-patch32-visual-quint8.onnx | vit-base-patch32 | visual | quint8 | ✅ | 89 |
| models/clip-vit-base-patch32-textual.onnx | vit-base-patch32 | textual | float32 (original) | ✅ | 254 |
| models/clip-vit-base-patch32-textual-float16.onnx | vit-base-patch32 | textual | float16 | ✅ | 127 |
| models/clip-vit-base-patch32-textual-qint8.onnx | vit-base-patch32 | textual | qint8 | ✅ | 64 |
| models/clip-vit-base-patch32-textual-quint8.onnx | vit-base-patch32 | textual | quint8 | ✅ | 64 |
| models/clip-vit-large-patch14-visual.onnx | vit-large-patch14 | visual | float32 (original) | ✅ | 1216 |
| models/clip-vit-large-patch14-visual-float16.onnx | vit-large-patch14 | visual | float16 | ✅ | 608 |
| models/clip-vit-large-patch14-visual-qint8.onnx | vit-large-patch14 | visual | qint8 | ✅ | 306 |
| models/clip-vit-large-patch14-visual-quint8.onnx | vit-large-patch14 | visual | quint8 | ✅ | 306 |
| models/clip-vit-large-patch14-textual.onnx | vit-large-patch14 | textual | float32 (original) | ✅ | 495 |
| models/clip-vit-large-patch14-textual-float16.onnx | vit-large-patch14 | textual | float16 | ✅ | 247 |
| models/clip-vit-large-patch14-textual-qint8.onnx | vit-large-patch14 | textual | qint8 | ✅ | 124 |
| models/clip-vit-large-patch14-textual-quint8.onnx | vit-large-patch14 | textual | quint8 | ✅ | 124 |
| models/clip-vit-large-patch14-336-visual.onnx | vit-large-patch14-336 | visual | float32 (original) | ✅ | 1217 |
| models/clip-vit-large-patch14-336-visual-float16.onnx | vit-large-patch14-336 | visual | float16 | ✅ | 609 |
| models/clip-vit-large-patch14-336-visual-qint8.onnx | vit-large-patch14-336 | visual | qint8 | ✅ | 307 |
| models/clip-vit-large-patch14-336-visual-quint8.onnx | vit-large-patch14-336 | visual | quint8 | ✅ | 307 |
| models/clip-vit-large-patch14-336-textual.onnx | vit-large-patch14-336 | textual | float32 (original) | ✅ | 495 |
| models/clip-vit-large-patch14-336-textual-float16.onnx | vit-large-patch14-336 | textual | float16 | ✅ | 247 |
| models/clip-vit-large-patch14-336-textual-qint8.onnx | vit-large-patch14-336 | textual | qint8 | ✅ | 124 |
| models/clip-vit-large-patch14-336-textual-quint8.onnx | vit-large-patch14-336 | textual | quint8 | ✅ | 124 |

[onnx]: https://onnx.ai/
[clip]: https://github.com/openai/CLIP
[clip-model-card]: https://github.com/openai/CLIP/blob/b4ae44927b78d0093b556e3ce43cbdcff422017a/model-card.md
cliponnx/__init__.py
ADDED
File without changes
cliponnx/bpe_simple_vocab_16e6.txt.gz
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:924691ac288e54409236115652ad4aa250f48203de50a9e4722a6ecd48d6804a
size 1356917
cliponnx/models.py
ADDED
@@ -0,0 +1,159 @@
# Based on https://github.com/openai/CLIP/blob/main/clip/model.py

import onnxruntime
import numpy as np
from typing import List, Union
from PIL import Image, ImageOps

from cliponnx.simple_tokenizer import SimpleTokenizer

def onnx_node_type_np_type(type):
    if type == "tensor(float)":
        return np.float32
    if type == "tensor(float16)":
        return np.float16
    if type == "tensor(int32)":
        return np.int32
    if type == "tensor(int64)":
        return np.int64
    raise NotImplementedError(f"Unsupported onnx type: {type}")

def ensure_input_type(input, type):
    np_type = onnx_node_type_np_type(type)
    if input.dtype == np_type:
        return input
    return input.astype(dtype=np_type)

class VisualModel:
    def __init__(self, path, providers=None):
        self.path = path
        print(f"Loading visual model: {path}")
        self.sess = onnxruntime.InferenceSession(path, providers=providers)
        self.input = self.sess.get_inputs()[0]
        self.output = self.sess.get_outputs()[0]

        # Expect an NCHW input with a square spatial size, e.g. (batch, 3, 224, 224).
        if len(self.input.shape) != 4 or self.input.shape[2] != self.input.shape[3]:
            raise ValueError(f"unexpected shape {self.input.shape}")
        self.input_size = self.input.shape[2]
        print(f"Visual inference ready, input size {self.input_size}, type {self.input.type}")

    def encode(self, image_input):
        image_input = ensure_input_type(image_input, self.input.type)
        return self.sess.run([self.output.name], {self.input.name: image_input})[0]

    def fitted(self, size, w, h):
        # Scale so the shorter side becomes `size`, preserving aspect ratio.
        short, long = (w, h) if w <= h else (h, w)
        new_short, new_long = size, int(size * long / short)
        new_w, new_h = (new_short, new_long) if w <= h else (new_long, new_short)
        return [new_w, new_h]

    def resize_to(self, img, size):
        new_size = self.fitted(size, img.width, img.height)
        return img.resize(size=new_size, resample=Image.Resampling.BICUBIC)

    def center_crop(self, img, size):
        image_height = img.height
        image_width = img.width
        if size > image_width or size > image_height:
            padding_ltrb = [
                (size - image_width) // 2 if size > image_width else 0,
                (size - image_height) // 2 if size > image_height else 0,
                (size - image_width + 1) // 2 if size > image_width else 0,
                (size - image_height + 1) // 2 if size > image_height else 0,
            ]
            # Pad small images out to the crop size (left, top, right, bottom).
            img = ImageOps.expand(img, border=tuple(padding_ltrb), fill=0)
            image_width = img.width
            image_height = img.height
            if size == image_width and size == image_height:
                return img
        top = int(round((image_height - size) / 2.0))
        left = int(round((image_width - size) / 2.0))
        return img.crop((left, top, left + size, top + size))

    def to_numpy(self, pic):
        mode_to_nptype = {"I": np.int32, "I;16": np.int16, "F": np.float32}
        img = np.array(pic, mode_to_nptype.get(pic.mode, np.uint8), copy=True)
        if pic.mode == "1":
            img = 255 * img
        # HWC -> CHW, then scale to [0, 1].
        img = np.transpose(img, (2, 0, 1))
        img = img.astype(np.float32)
        img = np.divide(img, 255)
        return img

    def normalize(self, img):
        # CLIP's published normalization constants.
        mean = np.array([0.48145466, 0.4578275, 0.40821073]).reshape((-1, 1, 1))
        std = np.array([0.26862954, 0.26130258, 0.27577711]).reshape((-1, 1, 1))
        return np.divide(np.subtract(img, mean), std)

    def preprocess(self, img):
        img = self.resize_to(img, self.input_size)
        img = self.center_crop(img, self.input_size)
        img = img.convert("RGB")
        img_np = self.to_numpy(img)
        img_np = self.normalize(img_np)
        return img_np

    def preprocess_images(self, images):
        preprocessed = []
        for img in images:
            if isinstance(img, str):
                img = Image.open(img)
            preprocessed.append(self.preprocess(img))
        return np.stack(preprocessed)

class TextualModel:
    def __init__(self, path, providers=None):
        self.path = path
        print(f"Loading textual model: {path}")
        self.sess = onnxruntime.InferenceSession(path, providers=providers)
        self.input = self.sess.get_inputs()[0]
        self.output = self.sess.get_outputs()[0]
        self.tokenizer = SimpleTokenizer()

        if len(self.input.shape) != 2 or self.input.shape[1] != 77:
            raise ValueError(f"unexpected shape {self.input.shape}")
        self.input_size = self.input.shape[1]
        print(f"Textual inference ready, input size {self.input_size}, type {self.input.type}")

    def encode(self, texts):
        return self.sess.run([self.output.name], {self.input.name: texts})[0]

    def tokenize(self, texts: Union[str, List[str]], context_length: int = 77, truncate: bool = False) -> np.ndarray:
        """
        Returns the tokenized representation of given input string(s)

        Parameters
        ----------
        texts : Union[str, List[str]]
            An input string or a list of input strings to tokenize

        context_length : int
            The context length to use; all CLIP models use 77 as the context length

        truncate: bool
            Whether to truncate the text in case its encoding is longer than the context length

        Returns
        -------
        A two-dimensional array containing the resulting tokens, shape = [number of input strings, context_length].
        """
        if isinstance(texts, str):
            texts = [texts]

        sot_token = self.tokenizer.encoder["<|startoftext|>"]
        eot_token = self.tokenizer.encoder["<|endoftext|>"]
        all_tokens = [[sot_token] + self.tokenizer.encode(text) + [eot_token] for text in texts]
        input_type = onnx_node_type_np_type(self.input.type)
        result = np.zeros(shape=(len(all_tokens), context_length), dtype=input_type)

        for i, tokens in enumerate(all_tokens):
            if len(tokens) > context_length:
                if truncate:
                    tokens = tokens[:context_length]
                    tokens[-1] = eot_token
                else:
                    raise RuntimeError(f"Input {texts[i]} is too long for context length {context_length}")
            result[i, :len(tokens)] = np.array(tokens)

        return result
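
The preprocessing in `VisualModel` above is meant to reproduce, using only NumPy and PIL, the torchvision transform that OpenAI's `clip.load()` returns (bicubic resize of the short side, center crop, scale to [0, 1], normalize with CLIP's mean/std). For reference, a sketch of that original pipeline, which needs torch/torchvision and is only required at conversion time:

```
from PIL import Image
from torchvision.transforms import Compose, Resize, CenterCrop, ToTensor, Normalize, InterpolationMode

# Reference CLIP preprocessing for a 224 px model.
preprocess = Compose([
    Resize(224, interpolation=InterpolationMode.BICUBIC),
    CenterCrop(224),
    ToTensor(),
    Normalize((0.48145466, 0.4578275, 0.40821073),
              (0.26862954, 0.26130258, 0.27577711)),
])

tensor = preprocess(Image.open("flowers.jpg").convert("RGB"))
print(tensor.shape)  # torch.Size([3, 224, 224])
```
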
cliponnx/simple_tokenizer.py
ADDED
@@ -0,0 +1,134 @@
# MIT License - copied from https://github.com/openai/CLIP/blob/main/clip/simple_tokenizer.py

import gzip
import html
import os
from functools import lru_cache

import ftfy
import regex as re


@lru_cache()
def default_bpe():
    return os.path.join(os.path.dirname(os.path.abspath(__file__)), "bpe_simple_vocab_16e6.txt.gz")


@lru_cache()
def bytes_to_unicode():
    """
    Returns list of utf-8 byte and a corresponding list of unicode strings.
    The reversible bpe codes work on unicode strings.
    This means you need a large # of unicode characters in your vocab if you want to avoid UNKs.
    When you're at something like a 10B token dataset you end up needing around 5K for decent coverage.
    This is a significant percentage of your normal, say, 32K bpe vocab.
    To avoid that, we want lookup tables between utf-8 bytes and unicode strings.
    And avoids mapping to whitespace/control characters the bpe code barfs on.
    """
    bs = list(range(ord("!"), ord("~")+1))+list(range(ord("¡"), ord("¬")+1))+list(range(ord("®"), ord("ÿ")+1))
    cs = bs[:]
    n = 0
    for b in range(2**8):
        if b not in bs:
            bs.append(b)
            cs.append(2**8+n)
            n += 1
    cs = [chr(n) for n in cs]
    return dict(zip(bs, cs))


def get_pairs(word):
    """Return set of symbol pairs in a word.
    Word is represented as tuple of symbols (symbols being variable-length strings).
    """
    pairs = set()
    prev_char = word[0]
    for char in word[1:]:
        pairs.add((prev_char, char))
        prev_char = char
    return pairs


def basic_clean(text):
    text = ftfy.fix_text(text)
    text = html.unescape(html.unescape(text))
    return text.strip()


def whitespace_clean(text):
    text = re.sub(r'\s+', ' ', text)
    text = text.strip()
    return text


class SimpleTokenizer(object):
    def __init__(self, bpe_path: str = default_bpe()):
        self.byte_encoder = bytes_to_unicode()
        self.byte_decoder = {v: k for k, v in self.byte_encoder.items()}
        merges = gzip.open(bpe_path).read().decode("utf-8").split('\n')
        merges = merges[1:49152-256-2+1]
        merges = [tuple(merge.split()) for merge in merges]
        vocab = list(bytes_to_unicode().values())
        vocab = vocab + [v+'</w>' for v in vocab]
        for merge in merges:
            vocab.append(''.join(merge))
        vocab.extend(['<|startoftext|>', '<|endoftext|>'])
        self.encoder = dict(zip(vocab, range(len(vocab))))
        self.decoder = {v: k for k, v in self.encoder.items()}
        self.bpe_ranks = dict(zip(merges, range(len(merges))))
        self.cache = {'<|startoftext|>': '<|startoftext|>', '<|endoftext|>': '<|endoftext|>'}
        self.pat = re.compile(r"""<\|startoftext\|>|<\|endoftext\|>|'s|'t|'re|'ve|'m|'ll|'d|[\p{L}]+|[\p{N}]|[^\s\p{L}\p{N}]+""", re.IGNORECASE)

    def bpe(self, token):
        if token in self.cache:
            return self.cache[token]
        word = tuple(token[:-1]) + ( token[-1] + '</w>',)
        pairs = get_pairs(word)

        if not pairs:
            return token+'</w>'

        while True:
            bigram = min(pairs, key = lambda pair: self.bpe_ranks.get(pair, float('inf')))
            if bigram not in self.bpe_ranks:
                break
            first, second = bigram
            new_word = []
            i = 0
            while i < len(word):
                try:
                    j = word.index(first, i)
                    new_word.extend(word[i:j])
                    i = j
                except:
                    new_word.extend(word[i:])
                    break

                if word[i] == first and i < len(word)-1 and word[i+1] == second:
                    new_word.append(first+second)
                    i += 2
                else:
                    new_word.append(word[i])
                    i += 1
            new_word = tuple(new_word)
            word = new_word
            if len(word) == 1:
                break
            else:
                pairs = get_pairs(word)
        word = ' '.join(word)
        self.cache[token] = word
        return word

    def encode(self, text):
        bpe_tokens = []
        text = whitespace_clean(basic_clean(text)).lower()
        for token in re.findall(self.pat, text):
            token = ''.join(self.byte_encoder[b] for b in token.encode('utf-8'))
            bpe_tokens.extend(self.encoder[bpe_token] for bpe_token in self.bpe(token).split(' '))
        return bpe_tokens

    def decode(self, tokens):
        text = ''.join([self.decoder[token] for token in tokens])
        text = bytearray([self.byte_decoder[c] for c in text]).decode('utf-8', errors="replace").replace('</w>', ' ')
        return text
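
As a quick illustration of what the tokenizer above produces, a round trip through `encode`/`decode` (the exact token ids depend on the bundled BPE vocabulary):

```
from cliponnx.simple_tokenizer import SimpleTokenizer

tokenizer = SimpleTokenizer()

ids = tokenizer.encode("a close up photo of a cherry blossom")
print(ids)                    # list of BPE token ids, without start/end-of-text tokens
print(tokenizer.decode(ids))  # "a close up photo of a cherry blossom " (each '</w>' becomes a trailing space)
```
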
convert.py
ADDED
@@ -0,0 +1,104 @@
import os

import clip
import torch.onnx
import torch
from torch import nn
from multiprocessing import Pool

class TextTransformer(nn.Module):
    """Wraps a CLIP model so that forward() runs only the text encoder."""
    def __init__(self, clip_model):
        super().__init__()
        self.clip_model = clip_model

    def forward(self, x: torch.Tensor):
        return self.clip_model.encode_text(x)

def export(model, input, path):
    print(f"Exporting to {path}")
    torch.onnx.export(
        model,                    # model being run
        input,                    # model input (or a tuple for multiple inputs)
        path,                     # where to save the model (can be a file or file-like object)
        export_params=True,       # store the trained parameter weights inside the model file
        opset_version=16,         # the ONNX version to export the model to
        do_constant_folding=True, # whether to execute constant folding for optimization
        input_names=['input'],    # the model's input names
        output_names=['output'],  # the model's output names
        dynamic_axes={
            'input': {0: 'batch_size'},  # variable length axes
            'output': {0: 'batch_size'}
        }
    )

def convert(model_name, dashed_name):
    visual_path = f"{output_dir}/clip-{dashed_name}-visual.onnx"
    textual_path = f"{output_dir}/clip-{dashed_name}-textual.onnx"
    visual_exists = os.path.exists(visual_path)
    textual_exists = os.path.exists(textual_path)
    if visual_exists and textual_exists:
        print(f"{visual_path} exists, skipping")
        print(f"{textual_path} exists, skipping")
        return

    print(f"Model: {model_name}")
    print("Loading CLIP")
    model, _ = clip.load(model_name, device=device)
    model = model.to(device=device)

    if not visual_exists:
        input_res = model.visual.input_resolution
        export(
            model.visual,
            torch.rand(1, 3, input_res, input_res),
            visual_path,
        )
    else:
        print(f"{visual_path} exists, skipping")

    if not textual_exists:
        text_transformer = TextTransformer(model)
        export(
            text_transformer,
            clip.tokenize(["hello onnx"]).to(device),
            textual_path,
        )
    else:
        print(f"{textual_path} exists, skipping")

device = "cuda" if torch.cuda.is_available() else "cpu"
device = "cpu"  # force CPU for export
output_dir = "models"
if __name__ == "__main__":
    print(f"Torch device: {device}")

    available_models = clip.available_models()
    print(f"Available models: {available_models}")

    models = [
        ("RN50", "resnet-50"),
        ("RN101", "resnet-101"),
        ("RN50x4", "resnet-50x4"),
        ("RN50x16", "resnet-50x16"),
        ("RN50x64", "resnet-50x64"),
        ("ViT-B/16", "vit-base-patch16"),
        ("ViT-B/32", "vit-base-patch32"),
        ("ViT-L/14", "vit-large-patch14"),
        ("ViT-L/14@336px", "vit-large-patch14-336"),
    ]

    print(f"Converting models: {models}")

    for model in models:
        convert(*model)

    # For converting multiple models at once
    # with Pool(1) as p:
    #     p.starmap(convert, models)

    print("done")
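
convert.py above only produces the float32 ONNX exports; the float16/qint8/quint8 variants are generated by a separate script (variants.py, listed in this commit but not shown in this excerpt). As a rough, hedged sketch of how such variants can be produced with the libraries pinned in poetry.lock (onnxconverter-common and onnxruntime); this is not necessarily the exact procedure variants.py uses:

```
import onnx
from onnxconverter_common import float16
from onnxruntime.quantization import QuantType, quantize_dynamic

src = "models/clip-vit-base-patch32-visual.onnx"

# float16: cast the float32 weights/activations down to half precision.
model_fp16 = float16.convert_float_to_float16(onnx.load(src))
onnx.save(model_fp16, "models/clip-vit-base-patch32-visual-float16.onnx")

# quint8 / qint8: dynamic weight quantization via onnxruntime.
quantize_dynamic(src, "models/clip-vit-base-patch32-visual-quint8.onnx", weight_type=QuantType.QUInt8)
quantize_dynamic(src, "models/clip-vit-base-patch32-visual-qint8.onnx", weight_type=QuantType.QInt8)
```
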
example.py
ADDED
@@ -0,0 +1,66 @@
from operator import itemgetter
import numpy as np
from tabulate import tabulate

from cliponnx.models import TextualModel, VisualModel

def cosine_similarity(a, b):
    return np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b))

# With GPU (slower startup, faster inference with supported cards)
# providers = ['TensorrtExecutionProvider', 'CUDAExecutionProvider', 'CPUExecutionProvider']

# CPU only (faster startup, slower inference)
providers = ['CPUExecutionProvider']

images = [
    "flowers.jpg",
    "heavy-industry.jpg",
]

texts = [
    "a close up photo of a cherry blossom",
    "cherry blossom",
    "flowers",
    "plant",
    "processing plant",
    "a large industrial plant with many pipes, walkways and railings",
    "ruhrgebiet",
    "industry",
    "a photo taken on a bright and sunny day",
    "a photo taken on a dark and cloudy day",
    "a photo taken at midnight",
    "bees",
    "cars",
    "dogs and cats",
]

visual = VisualModel("models/clip-vit-base-patch32-visual-float16.onnx", providers=providers)
images_input = visual.preprocess_images(images)
print(f"Images shape: {images_input.shape}")
image_embeddings = visual.encode(images_input)
print(f"Embeddings shape: {image_embeddings.shape}")
print()

textual = TextualModel("models/clip-vit-base-patch32-textual-float16.onnx", providers=providers)
texts_input = textual.tokenize(texts)
print(f"Texts shape: {texts_input.shape}")
text_embeddings = textual.encode(texts_input)
print(f"Embeddings shape: {text_embeddings.shape}")
print()

table = [["image", "similarity", "text"]]

for ii, image in enumerate(images):
    image_embedding = image_embeddings[ii]

    similarities = []
    for ti, text in enumerate(texts):
        text_embedding = text_embeddings[ti]
        similarity = cosine_similarity(image_embedding, text_embedding)
        similarities.append([similarity, ">" * int(similarity * 30), text])

    similarities.sort(reverse=True, key=itemgetter(0))
    print(image)
    print(tabulate(similarities))
    print()
flowers.jpg
ADDED
Git LFS Details
heavy-industry.jpg
ADDED
Git LFS Details
poetry.lock
ADDED
@@ -0,0 +1,841 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
[[package]]
|
2 |
+
name = "certifi"
|
3 |
+
version = "2022.9.24"
|
4 |
+
description = "Python package for providing Mozilla's CA Bundle."
|
5 |
+
category = "main"
|
6 |
+
optional = false
|
7 |
+
python-versions = ">=3.6"
|
8 |
+
|
9 |
+
[[package]]
|
10 |
+
name = "charset-normalizer"
|
11 |
+
version = "2.1.1"
|
12 |
+
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
|
13 |
+
category = "main"
|
14 |
+
optional = false
|
15 |
+
python-versions = ">=3.6.0"
|
16 |
+
|
17 |
+
[package.extras]
|
18 |
+
unicode_backport = ["unicodedata2"]
|
19 |
+
|
20 |
+
[[package]]
|
21 |
+
name = "clip"
|
22 |
+
version = "1.0"
|
23 |
+
description = ""
|
24 |
+
category = "main"
|
25 |
+
optional = false
|
26 |
+
python-versions = "*"
|
27 |
+
develop = false
|
28 |
+
|
29 |
+
[package.dependencies]
|
30 |
+
ftfy = "*"
|
31 |
+
regex = "*"
|
32 |
+
torch = "*"
|
33 |
+
torchvision = "*"
|
34 |
+
tqdm = "*"
|
35 |
+
|
36 |
+
[package.extras]
|
37 |
+
dev = ["pytest"]
|
38 |
+
|
39 |
+
[package.source]
|
40 |
+
type = "git"
|
41 |
+
url = "https://github.com/openai/CLIP.git"
|
42 |
+
reference = "HEAD"
|
43 |
+
resolved_reference = "d50d76daa670286dd6cacf3bcd80b5e4823fc8e1"
|
44 |
+
|
45 |
+
[[package]]
|
46 |
+
name = "colorama"
|
47 |
+
version = "0.4.5"
|
48 |
+
description = "Cross-platform colored terminal text."
|
49 |
+
category = "main"
|
50 |
+
optional = false
|
51 |
+
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
52 |
+
|
53 |
+
[[package]]
|
54 |
+
name = "coloredlogs"
|
55 |
+
version = "15.0.1"
|
56 |
+
description = "Colored terminal output for Python's logging module"
|
57 |
+
category = "main"
|
58 |
+
optional = false
|
59 |
+
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
60 |
+
|
61 |
+
[package.dependencies]
|
62 |
+
humanfriendly = ">=9.1"
|
63 |
+
|
64 |
+
[package.extras]
|
65 |
+
cron = ["capturer (>=2.4)"]
|
66 |
+
|
67 |
+
[[package]]
|
68 |
+
name = "flatbuffers"
|
69 |
+
version = "22.9.24"
|
70 |
+
description = "The FlatBuffers serialization format for Python"
|
71 |
+
category = "main"
|
72 |
+
optional = false
|
73 |
+
python-versions = "*"
|
74 |
+
|
75 |
+
[[package]]
|
76 |
+
name = "ftfy"
|
77 |
+
version = "6.1.1"
|
78 |
+
description = "Fixes mojibake and other problems with Unicode, after the fact"
|
79 |
+
category = "main"
|
80 |
+
optional = false
|
81 |
+
python-versions = ">=3.7,<4"
|
82 |
+
|
83 |
+
[package.dependencies]
|
84 |
+
wcwidth = ">=0.2.5"
|
85 |
+
|
86 |
+
[[package]]
|
87 |
+
name = "humanfriendly"
|
88 |
+
version = "10.0"
|
89 |
+
description = "Human friendly output for text interfaces using Python"
|
90 |
+
category = "main"
|
91 |
+
optional = false
|
92 |
+
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
93 |
+
|
94 |
+
[package.dependencies]
|
95 |
+
pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""}
|
96 |
+
|
97 |
+
[[package]]
|
98 |
+
name = "idna"
|
99 |
+
version = "3.4"
|
100 |
+
description = "Internationalized Domain Names in Applications (IDNA)"
|
101 |
+
category = "main"
|
102 |
+
optional = false
|
103 |
+
python-versions = ">=3.5"
|
104 |
+
|
105 |
+
[[package]]
|
106 |
+
name = "joblib"
|
107 |
+
version = "1.2.0"
|
108 |
+
description = "Lightweight pipelining with Python functions"
|
109 |
+
category = "main"
|
110 |
+
optional = false
|
111 |
+
python-versions = ">=3.7"
|
112 |
+
|
113 |
+
[[package]]
|
114 |
+
name = "mpmath"
|
115 |
+
version = "1.2.1"
|
116 |
+
description = "Python library for arbitrary-precision floating-point arithmetic"
|
117 |
+
category = "main"
|
118 |
+
optional = false
|
119 |
+
python-versions = "*"
|
120 |
+
|
121 |
+
[package.extras]
|
122 |
+
develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"]
|
123 |
+
tests = ["pytest (>=4.6)"]
|
124 |
+
|
125 |
+
[[package]]
|
126 |
+
name = "numpy"
|
127 |
+
version = "1.23.3"
|
128 |
+
description = "NumPy is the fundamental package for array computing with Python."
|
129 |
+
category = "main"
|
130 |
+
optional = false
|
131 |
+
python-versions = ">=3.8"
|
132 |
+
|
133 |
+
[[package]]
|
134 |
+
name = "onnx"
|
135 |
+
version = "1.11.0"
|
136 |
+
description = "Open Neural Network Exchange"
|
137 |
+
category = "main"
|
138 |
+
optional = false
|
139 |
+
python-versions = "*"
|
140 |
+
|
141 |
+
[package.dependencies]
|
142 |
+
numpy = ">=1.16.6"
|
143 |
+
protobuf = ">=3.12.2"
|
144 |
+
typing-extensions = ">=3.6.2.1"
|
145 |
+
|
146 |
+
[package.extras]
|
147 |
+
mypy = ["mypy (==0.782)", "types-protobuf (==3.18.4)"]
|
148 |
+
|
149 |
+
[[package]]
|
150 |
+
name = "onnxconverter-common"
|
151 |
+
version = "1.12.2"
|
152 |
+
description = "ONNX Converter and Optimization Tools"
|
153 |
+
category = "main"
|
154 |
+
optional = false
|
155 |
+
python-versions = "*"
|
156 |
+
|
157 |
+
[package.dependencies]
|
158 |
+
numpy = "*"
|
159 |
+
onnx = "*"
|
160 |
+
protobuf = "*"
|
161 |
+
|
162 |
+
[[package]]
|
163 |
+
name = "onnxmltools"
|
164 |
+
version = "1.11.1"
|
165 |
+
description = "Converts Machine Learning models to ONNX"
|
166 |
+
category = "main"
|
167 |
+
optional = false
|
168 |
+
python-versions = "*"
|
169 |
+
|
170 |
+
[package.dependencies]
|
171 |
+
numpy = "*"
|
172 |
+
onnx = "*"
|
173 |
+
skl2onnx = "*"
|
174 |
+
|
175 |
+
[[package]]
|
176 |
+
name = "onnxruntime"
|
177 |
+
version = "1.12.1"
|
178 |
+
description = "ONNX Runtime is a runtime accelerator for Machine Learning models"
|
179 |
+
category = "main"
|
180 |
+
optional = false
|
181 |
+
python-versions = "*"
|
182 |
+
|
183 |
+
[package.dependencies]
|
184 |
+
coloredlogs = "*"
|
185 |
+
flatbuffers = "*"
|
186 |
+
numpy = ">=1.21.0"
|
187 |
+
packaging = "*"
|
188 |
+
protobuf = "*"
|
189 |
+
sympy = "*"
|
190 |
+
|
191 |
+
[[package]]
|
192 |
+
name = "packaging"
|
193 |
+
version = "21.3"
|
194 |
+
description = "Core utilities for Python packages"
|
195 |
+
category = "main"
|
196 |
+
optional = false
|
197 |
+
python-versions = ">=3.6"
|
198 |
+
|
199 |
+
[package.dependencies]
|
200 |
+
pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
|
201 |
+
|
202 |
+
[[package]]
|
203 |
+
name = "Pillow"
|
204 |
+
version = "9.2.0"
|
205 |
+
description = "Python Imaging Library (Fork)"
|
206 |
+
category = "main"
|
207 |
+
optional = false
|
208 |
+
python-versions = ">=3.7"
|
209 |
+
|
210 |
+
[package.extras]
|
211 |
+
docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"]
|
212 |
+
tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"]
|
213 |
+
|
214 |
+
[[package]]
|
215 |
+
name = "protobuf"
|
216 |
+
version = "4.21.7"
|
217 |
+
description = ""
|
218 |
+
category = "main"
|
219 |
+
optional = false
|
220 |
+
python-versions = ">=3.7"
|
221 |
+
|
222 |
+
[[package]]
|
223 |
+
name = "pyparsing"
|
224 |
+
version = "3.0.9"
|
225 |
+
description = "pyparsing module - Classes and methods to define and execute parsing grammars"
|
226 |
+
category = "main"
|
227 |
+
optional = false
|
228 |
+
python-versions = ">=3.6.8"
|
229 |
+
|
230 |
+
[package.extras]
|
231 |
+
diagrams = ["jinja2", "railroad-diagrams"]
|
232 |
+
|
233 |
+
[[package]]
|
234 |
+
name = "pyreadline3"
|
235 |
+
version = "3.4.1"
|
236 |
+
description = "A python implementation of GNU readline."
|
237 |
+
category = "main"
|
238 |
+
optional = false
|
239 |
+
python-versions = "*"
|
240 |
+
|
241 |
+
[[package]]
|
242 |
+
name = "regex"
|
243 |
+
version = "2022.9.13"
|
244 |
+
description = "Alternative regular expression module, to replace re."
|
245 |
+
category = "main"
|
246 |
+
optional = false
|
247 |
+
python-versions = ">=3.6"
|
248 |
+
|
249 |
+
[[package]]
|
250 |
+
name = "requests"
|
251 |
+
version = "2.28.1"
|
252 |
+
description = "Python HTTP for Humans."
|
253 |
+
category = "main"
|
254 |
+
optional = false
|
255 |
+
python-versions = ">=3.7, <4"
|
256 |
+
|
257 |
+
[package.dependencies]
|
258 |
+
certifi = ">=2017.4.17"
|
259 |
+
charset-normalizer = ">=2,<3"
|
260 |
+
idna = ">=2.5,<4"
|
261 |
+
urllib3 = ">=1.21.1,<1.27"
|
262 |
+
|
263 |
+
[package.extras]
|
264 |
+
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
|
265 |
+
use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
|
266 |
+
|
267 |
+
[[package]]
|
268 |
+
name = "scikit-learn"
|
269 |
+
version = "1.1.1"
|
270 |
+
description = "A set of python modules for machine learning and data mining"
|
271 |
+
category = "main"
|
272 |
+
optional = false
|
273 |
+
python-versions = ">=3.8"
|
274 |
+
|
275 |
+
[package.dependencies]
|
276 |
+
joblib = ">=1.0.0"
|
277 |
+
numpy = ">=1.17.3"
|
278 |
+
scipy = ">=1.3.2"
|
279 |
+
threadpoolctl = ">=2.0.0"
|
280 |
+
|
281 |
+
[package.extras]
|
282 |
+
benchmark = ["matplotlib (>=3.1.2)", "memory-profiler (>=0.57.0)", "pandas (>=1.0.5)"]
|
283 |
+
docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.1.2)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "scikit-image (>=0.14.5)", "seaborn (>=0.9.0)", "sphinx (>=4.0.1)", "sphinx-gallery (>=0.7.0)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"]
|
284 |
+
examples = ["matplotlib (>=3.1.2)", "pandas (>=1.0.5)", "scikit-image (>=0.14.5)", "seaborn (>=0.9.0)"]
|
285 |
+
tests = ["black (>=22.3.0)", "flake8 (>=3.8.2)", "matplotlib (>=3.1.2)", "mypy (>=0.770)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "pyamg (>=4.0.0)", "pytest (>=5.0.1)", "pytest-cov (>=2.9.0)", "scikit-image (>=0.14.5)"]
|
286 |
+
|
287 |
+
[[package]]
|
288 |
+
name = "scipy"
|
289 |
+
version = "1.6.1"
|
290 |
+
description = "SciPy: Scientific Library for Python"
|
291 |
+
category = "main"
|
292 |
+
optional = false
|
293 |
+
python-versions = ">=3.7"
|
294 |
+
|
295 |
+
[package.dependencies]
|
296 |
+
numpy = ">=1.16.5"
|
297 |
+
|
298 |
+
[[package]]
|
299 |
+
name = "skl2onnx"
|
300 |
+
version = "1.13"
|
301 |
+
description = "Convert scikit-learn models to ONNX"
|
302 |
+
category = "main"
|
303 |
+
optional = false
|
304 |
+
python-versions = "*"
|
305 |
+
|
306 |
+
[package.dependencies]
|
307 |
+
numpy = ">=1.15"
|
308 |
+
onnx = ">=1.2.1"
|
309 |
+
onnxconverter-common = ">=1.7.0"
|
310 |
+
protobuf = "*"
|
311 |
+
scikit-learn = ">=0.19,<=1.1.1"
|
312 |
+
scipy = ">=1.0"
|
313 |
+
|
314 |
+
[[package]]
|
315 |
+
name = "sympy"
|
316 |
+
version = "1.11.1"
|
317 |
+
description = "Computer algebra system (CAS) in Python"
|
318 |
+
category = "main"
|
319 |
+
optional = false
|
320 |
+
python-versions = ">=3.8"
|
321 |
+
|
322 |
+
[package.dependencies]
|
323 |
+
mpmath = ">=0.19"
|
324 |
+
|
325 |
+
[[package]]
|
326 |
+
name = "tabulate"
|
327 |
+
version = "0.8.10"
|
328 |
+
description = "Pretty-print tabular data"
|
329 |
+
category = "main"
|
330 |
+
optional = false
|
331 |
+
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
332 |
+
|
333 |
+
[package.extras]
|
334 |
+
widechars = ["wcwidth"]
|
335 |
+
|
336 |
+
[[package]]
|
337 |
+
name = "threadpoolctl"
|
338 |
+
version = "3.1.0"
|
339 |
+
description = "threadpoolctl"
|
340 |
+
category = "main"
|
341 |
+
optional = false
|
342 |
+
python-versions = ">=3.6"
|
343 |
+
|
344 |
+
[[package]]
|
345 |
+
name = "torch"
|
346 |
+
version = "1.12.1+cu116"
|
347 |
+
description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration"
|
348 |
+
category = "main"
|
349 |
+
optional = false
|
350 |
+
python-versions = ">=3.7.0"
|
351 |
+
|
352 |
+
[package.dependencies]
|
353 |
+
typing-extensions = "*"
|
354 |
+
|
355 |
+
[package.source]
|
356 |
+
type = "legacy"
|
357 |
+
url = "https://download.pytorch.org/whl/cu116"
|
358 |
+
reference = "torch"
|
359 |
+
|
360 |
+
[[package]]
|
361 |
+
name = "torchvision"
|
362 |
+
version = "0.13.1+cu116"
|
363 |
+
description = "image and video datasets and models for torch deep learning"
|
364 |
+
category = "main"
|
365 |
+
optional = false
|
366 |
+
python-versions = ">=3.7"
|
367 |
+
|
368 |
+
[package.dependencies]
|
369 |
+
numpy = "*"
|
370 |
+
pillow = ">=5.3.0,<8.3.0 || >=8.4.0"
|
371 |
+
requests = "*"
|
372 |
+
torch = "1.12.1"
|
373 |
+
typing-extensions = "*"
|
374 |
+
|
375 |
+
[package.extras]
|
376 |
+
scipy = ["scipy"]
|
377 |
+
|
378 |
+
[package.source]
|
379 |
+
type = "legacy"
|
380 |
+
url = "https://download.pytorch.org/whl/cu116"
|
381 |
+
reference = "torch"
|
382 |
+
|
383 |
+
[[package]]
|
384 |
+
name = "tqdm"
|
385 |
+
version = "4.64.1"
|
386 |
+
description = "Fast, Extensible Progress Meter"
|
387 |
+
category = "main"
|
388 |
+
optional = false
|
389 |
+
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
|
390 |
+
|
391 |
+
[package.dependencies]
|
392 |
+
colorama = {version = "*", markers = "platform_system == \"Windows\""}
|
393 |
+
|
394 |
+
[package.extras]
|
395 |
+
dev = ["py-make (>=0.1.0)", "twine", "wheel"]
|
396 |
+
notebook = ["ipywidgets (>=6)"]
|
397 |
+
slack = ["slack-sdk"]
|
398 |
+
telegram = ["requests"]
|
399 |
+
|
400 |
+
[[package]]
|
401 |
+
name = "typing-extensions"
|
402 |
+
version = "4.3.0"
|
403 |
+
description = "Backported and Experimental Type Hints for Python 3.7+"
|
404 |
+
category = "main"
|
405 |
+
optional = false
|
406 |
+
python-versions = ">=3.7"
|
407 |
+
|
408 |
+
[[package]]
|
409 |
+
name = "urllib3"
|
410 |
+
version = "1.26.12"
|
411 |
+
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
412 |
+
category = "main"
|
413 |
+
optional = false
|
414 |
+
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4"
|
415 |
+
|
416 |
+
[package.extras]
|
417 |
+
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
|
418 |
+
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
|
419 |
+
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
|
420 |
+
|
421 |
+
[[package]]
|
422 |
+
name = "wcwidth"
|
423 |
+
version = "0.2.5"
|
424 |
+
description = "Measures the displayed width of unicode strings in a terminal"
|
425 |
+
category = "main"
|
426 |
+
optional = false
|
427 |
+
python-versions = "*"
|
428 |
+
|
429 |
+
[metadata]
|
430 |
+
lock-version = "1.1"
|
431 |
+
python-versions = "^3.9"
|
432 |
+
content-hash = "2d11d7968b077f8474fb974736347963175d486d3d0e83cbcfd8d2d16b9a9703"
|
433 |
+
|
434 |
+
[metadata.files]
|
435 |
+
certifi = [
|
436 |
+
{file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"},
|
437 |
+
{file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"},
|
438 |
+
]
|
439 |
+
charset-normalizer = [
|
440 |
+
{file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"},
|
441 |
+
{file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"},
|
442 |
+
]
|
443 |
+
clip = []
|
444 |
+
colorama = [
|
445 |
+
{file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"},
|
446 |
+
{file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"},
|
447 |
+
]
|
448 |
+
coloredlogs = [
|
449 |
+
{file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"},
|
450 |
+
{file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"},
|
451 |
+
]
|
452 |
+
flatbuffers = [
|
453 |
+
{file = "flatbuffers-22.9.24-py2.py3-none-any.whl", hash = "sha256:fc30f024e2eee55922d610f4d68626002fcd3c8f87d8058ec5ae9edd86993bcb"},
|
454 |
+
]
|
455 |
+
ftfy = [
|
456 |
+
{file = "ftfy-6.1.1-py3-none-any.whl", hash = "sha256:0ffd33fce16b54cccaec78d6ec73d95ad370e5df5a25255c8966a6147bd667ca"},
|
457 |
+
{file = "ftfy-6.1.1.tar.gz", hash = "sha256:bfc2019f84fcd851419152320a6375604a0f1459c281b5b199b2cd0d2e727f8f"},
|
458 |
+
]
|
459 |
+
humanfriendly = [
|
460 |
+
{file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"},
|
461 |
+
{file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"},
|
462 |
+
]
|
463 |
+
idna = [
|
464 |
+
{file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
|
465 |
+
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
|
466 |
+
]
|
467 |
+
joblib = [
|
468 |
+
{file = "joblib-1.2.0-py3-none-any.whl", hash = "sha256:091138ed78f800342968c523bdde947e7a305b8594b910a0fea2ab83c3c6d385"},
|
469 |
+
{file = "joblib-1.2.0.tar.gz", hash = "sha256:e1cee4a79e4af22881164f218d4311f60074197fb707e082e803b61f6d137018"},
|
470 |
+
]
|
471 |
+
mpmath = [
|
472 |
+
{file = "mpmath-1.2.1-py3-none-any.whl", hash = "sha256:604bc21bd22d2322a177c73bdb573994ef76e62edd595d17e00aff24b0667e5c"},
|
473 |
+
{file = "mpmath-1.2.1.tar.gz", hash = "sha256:79ffb45cf9f4b101a807595bcb3e72e0396202e0b1d25d689134b48c4216a81a"},
|
474 |
+
]
|
475 |
+
numpy = [
|
476 |
+
{file = "numpy-1.23.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c9f707b5bb73bf277d812ded9896f9512a43edff72712f31667d0a8c2f8e71ee"},
|
477 |
+
{file = "numpy-1.23.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ffcf105ecdd9396e05a8e58e81faaaf34d3f9875f137c7372450baa5d77c9a54"},
|
478 |
+
{file = "numpy-1.23.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ea3f98a0ffce3f8f57675eb9119f3f4edb81888b6874bc1953f91e0b1d4f440"},
|
479 |
+
{file = "numpy-1.23.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:004f0efcb2fe1c0bd6ae1fcfc69cc8b6bf2407e0f18be308612007a0762b4089"},
|
480 |
+
{file = "numpy-1.23.3-cp310-cp310-win32.whl", hash = "sha256:98dcbc02e39b1658dc4b4508442a560fe3ca5ca0d989f0df062534e5ca3a5c1a"},
|
481 |
+
{file = "numpy-1.23.3-cp310-cp310-win_amd64.whl", hash = "sha256:39a664e3d26ea854211867d20ebcc8023257c1800ae89773cbba9f9e97bae036"},
|
482 |
+
{file = "numpy-1.23.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1f27b5322ac4067e67c8f9378b41c746d8feac8bdd0e0ffede5324667b8a075c"},
|
483 |
+
{file = "numpy-1.23.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ad3ec9a748a8943e6eb4358201f7e1c12ede35f510b1a2221b70af4bb64295c"},
|
484 |
+
{file = "numpy-1.23.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdc9febce3e68b697d931941b263c59e0c74e8f18861f4064c1f712562903411"},
|
485 |
+
{file = "numpy-1.23.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:301c00cf5e60e08e04d842fc47df641d4a181e651c7135c50dc2762ffe293dbd"},
|
486 |
+
{file = "numpy-1.23.3-cp311-cp311-win32.whl", hash = "sha256:7cd1328e5bdf0dee621912f5833648e2daca72e3839ec1d6695e91089625f0b4"},
|
487 |
+
{file = "numpy-1.23.3-cp311-cp311-win_amd64.whl", hash = "sha256:8355fc10fd33a5a70981a5b8a0de51d10af3688d7a9e4a34fcc8fa0d7467bb7f"},
|
488 |
+
{file = "numpy-1.23.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc6e8da415f359b578b00bcfb1d08411c96e9a97f9e6c7adada554a0812a6cc6"},
|
489 |
+
{file = "numpy-1.23.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:22d43376ee0acd547f3149b9ec12eec2f0ca4a6ab2f61753c5b29bb3e795ac4d"},
|
490 |
+
{file = "numpy-1.23.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a64403f634e5ffdcd85e0b12c08f04b3080d3e840aef118721021f9b48fc1460"},
|
491 |
+
{file = "numpy-1.23.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efd9d3abe5774404becdb0748178b48a218f1d8c44e0375475732211ea47c67e"},
|
492 |
+
{file = "numpy-1.23.3-cp38-cp38-win32.whl", hash = "sha256:f8c02ec3c4c4fcb718fdf89a6c6f709b14949408e8cf2a2be5bfa9c49548fd85"},
|
493 |
+
{file = "numpy-1.23.3-cp38-cp38-win_amd64.whl", hash = "sha256:e868b0389c5ccfc092031a861d4e158ea164d8b7fdbb10e3b5689b4fc6498df6"},
|
494 |
+
{file = "numpy-1.23.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09f6b7bdffe57fc61d869a22f506049825d707b288039d30f26a0d0d8ea05164"},
|
495 |
+
{file = "numpy-1.23.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8c79d7cf86d049d0c5089231a5bcd31edb03555bd93d81a16870aa98c6cfb79d"},
|
496 |
+
{file = "numpy-1.23.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5d5420053bbb3dd64c30e58f9363d7a9c27444c3648e61460c1237f9ec3fa14"},
|
497 |
+
{file = "numpy-1.23.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5422d6a1ea9b15577a9432e26608c73a78faf0b9039437b075cf322c92e98e7"},
|
498 |
+
{file = "numpy-1.23.3-cp39-cp39-win32.whl", hash = "sha256:c1ba66c48b19cc9c2975c0d354f24058888cdc674bebadceb3cdc9ec403fb5d1"},
|
499 |
+
{file = "numpy-1.23.3-cp39-cp39-win_amd64.whl", hash = "sha256:78a63d2df1d947bd9d1b11d35564c2f9e4b57898aae4626638056ec1a231c40c"},
|
500 |
+
{file = "numpy-1.23.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:17c0e467ade9bda685d5ac7f5fa729d8d3e76b23195471adae2d6a6941bd2c18"},
|
501 |
+
{file = "numpy-1.23.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91b8d6768a75247026e951dce3b2aac79dc7e78622fc148329135ba189813584"},
|
502 |
+
{file = "numpy-1.23.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:94c15ca4e52671a59219146ff584488907b1f9b3fc232622b47e2cf832e94fb8"},
|
503 |
+
{file = "numpy-1.23.3.tar.gz", hash = "sha256:51bf49c0cd1d52be0a240aa66f3458afc4b95d8993d2d04f0d91fa60c10af6cd"},
|
504 |
+
]
|
505 |
+
onnx = [
|
506 |
+
{file = "onnx-1.11.0-cp36-cp36m-macosx_10_12_x86_64.whl", hash = "sha256:a6e9135f1d02539ca7573f699fb0d31d3c43d10fac1d2d2239a9a1c553506c29"},
|
507 |
+
{file = "onnx-1.11.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b2de0b117ad77689d308824a0c9eb89539ec28a799b4e2e05b3bb977b0da0b45"},
|
508 |
+
{file = "onnx-1.11.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f335d982b8ed201cf767459b993630acfd20c32b100529f70af9f28a26e72167"},
|
509 |
+
{file = "onnx-1.11.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:593ca9e11f15afa26b3aaf2d170bb803d4bd86dbd560aa7be4e5f535d03f83d5"},
|
510 |
+
{file = "onnx-1.11.0-cp36-cp36m-win32.whl", hash = "sha256:df85666ab2b88fd9cf9b2504bcb551da39422eab65a143926a8db58f81b09164"},
|
511 |
+
{file = "onnx-1.11.0-cp36-cp36m-win_amd64.whl", hash = "sha256:82221a07707b1ccf71fb18c6abb77f2566517a55d5185809775b5ff008bfb35c"},
|
512 |
+
{file = "onnx-1.11.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:4aa899f74acd4c5543f0efed8bfe98a3b701df75c5ffa179212e3088c51971bb"},
|
513 |
+
{file = "onnx-1.11.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:58d4873ec587ac14c44227d8027787edc88cd61596e646e3417f2a826a920898"},
|
514 |
+
{file = "onnx-1.11.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7a2f5d6998fe79aed80fad9d4522140d02c4d29513047e335d5c5355c1ebda5e"},
|
515 |
+
{file = "onnx-1.11.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb46f31f12bb0bfdcfb68497d10b20447cf8fa6c4f693120c013e052645357b8"},
|
516 |
+
{file = "onnx-1.11.0-cp37-cp37m-win32.whl", hash = "sha256:997d91ffd7b7ae7aee09c6d652a896d906be430d425865c759b51a8de5df9fe0"},
|
517 |
+
{file = "onnx-1.11.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ea06dbf57a287657b6dc4e189918e4cb451450308589d482117216194d6f83d6"},
|
518 |
+
{file = "onnx-1.11.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:c3d3503110f2cab2c818f4a7b2bc8abc3bc79649daa39e70d5fb504b208ddb1e"},
|
519 |
+
{file = "onnx-1.11.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9b9f58ea01c1b20b057f55f628df4fc0403bbc160b7282a56e3bb4df5c7fb96f"},
|
520 |
+
{file = "onnx-1.11.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89420e5b824d7e182846fe2aa09190ddb41162b261465c6ca928174bc2ac10b7"},
|
521 |
+
{file = "onnx-1.11.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6ddbe89e32f885db736d36fcb132784e368331a18c3b6168ac9f561eb462057"},
|
522 |
+
{file = "onnx-1.11.0-cp38-cp38-win32.whl", hash = "sha256:0cf47c205b376b3763beef92a6de4152f3b1552d6f640d93044938500baf5958"},
|
523 |
+
{file = "onnx-1.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:d6581dd2122525549d1d8b431b8bf375298993c77bddb8fd0bf0d92611df76a1"},
|
524 |
+
{file = "onnx-1.11.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:4454906de80a351de6929b0896ad605d106c324c3112c92249240e531f68fbba"},
|
525 |
+
{file = "onnx-1.11.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ae74bf8fa343b64e2b7fe205091b7f3728887c018ae061d161dd86ec95eb66a8"},
|
526 |
+
{file = "onnx-1.11.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:67c6d2654c1c203e5c839a47900b51f588fd0de71bbd497fb193d30a0b3ec1e9"},
|
527 |
+
{file = "onnx-1.11.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43b32a2f20c94aa98866deae9e4218faf0495144ad05402e918fa279674b6df9"},
|
528 |
+
{file = "onnx-1.11.0-cp39-cp39-win32.whl", hash = "sha256:7924d9baa13dbbf335737229f6d068f380d153679f357e495da60007b61cf56d"},
|
529 |
+
{file = "onnx-1.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:3403884c482859f8cf2e0c276da84bd9ac2235d266726f4ddc9625d3fd263218"},
|
530 |
+
{file = "onnx-1.11.0.tar.gz", hash = "sha256:eca224c7c2c8ee4072a0743e4898a84a9bdf8297b5e5910a2632e4c4182ffb2a"},
|
531 |
+
]
|
532 |
+
onnxconverter-common = [
|
533 |
+
{file = "onnxconverter_common-1.12.2-py2.py3-none-any.whl", hash = "sha256:29b7caade27aeda1b827232554cec352db8afc6e16c3e3ea8c4264449f9ff3a6"},
|
534 |
+
]
|
535 |
+
onnxmltools = [
|
536 |
+
{file = "onnxmltools-1.11.1-py3-none-any.whl", hash = "sha256:c8a108e36cb12b5f1393b03ffba05d3f6be16f421de5666ae9e25bbc3b593594"},
|
537 |
+
]
|
538 |
+
onnxruntime = [
|
539 |
+
{file = "onnxruntime-1.12.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:98bb8920036b6ae1bc71af1bb061cd42297717a4b25c0ba521f3471ef946e4f2"},
|
540 |
+
{file = "onnxruntime-1.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:977e4388c773a14cf2f71c6f4ac4f039691ab3ac7ade4e13e7f019d752eaa053"},
|
541 |
+
{file = "onnxruntime-1.12.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4749a89d2f820ae5d80704a55fedd233fa54dd2adaecf4423435eb68207dace7"},
|
542 |
+
{file = "onnxruntime-1.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2715aa4d0bc03acf92c79df3d52e7435ea9da3ab2ed2208ad66534a51d2e5de9"},
|
543 |
+
{file = "onnxruntime-1.12.1-cp310-cp310-manylinux_2_27_x86_64.whl", hash = "sha256:84176d930aabbdc6ad93021cf416e58af6a88f1c43a5d921f0b02c82c0491cd1"},
|
544 |
+
{file = "onnxruntime-1.12.1-cp310-cp310-win32.whl", hash = "sha256:51a8777018e464b9ba8091c028c53c9f399d64a5994a9ff9f17e88969e62bbe2"},
|
545 |
+
{file = "onnxruntime-1.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:65bdbb27ea50f0f84c2039ea66e97363c6a31022965575bca8e5f220a40b0c5c"},
|
546 |
+
{file = "onnxruntime-1.12.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:3b24c6323e7ae328ede4f76ccf7eb014ce29493cca013edee453e2ff342499b3"},
|
547 |
+
{file = "onnxruntime-1.12.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25179f463e8f641f7f37963dd13e3561f64d0f733287f3e740352ccba440e9f7"},
|
548 |
+
{file = "onnxruntime-1.12.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa5e0653fb7e1a24bb73a378f208b8fd9a7b1622f89f26be093efd93a4fe4f25"},
|
549 |
+
{file = "onnxruntime-1.12.1-cp37-cp37m-manylinux_2_27_x86_64.whl", hash = "sha256:0a376399d21ea070a173c81aae0901012955afd0acc9e5574d7f22d54ceaff65"},
|
550 |
+
{file = "onnxruntime-1.12.1-cp37-cp37m-win32.whl", hash = "sha256:e987ca0206a6dda3d0b70bb3ebee3dc5ff9ea59c6caa7c6586ce5bac87a7f0e3"},
|
551 |
+
{file = "onnxruntime-1.12.1-cp37-cp37m-win_amd64.whl", hash = "sha256:c79b15b9136e68eafc0badc88d306c6c794611857c2b573d9cd8ee1dfaf25619"},
|
552 |
+
{file = "onnxruntime-1.12.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:00b07118bfe8beb44d6028813f14f1bfe4bd7896ac49be3ad9d76102f11ba744"},
|
553 |
+
{file = "onnxruntime-1.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9bd0ab5b99ef0d34331fd871603a3fd5f375fb0518bfc5ca09ce48194a813dfa"},
|
554 |
+
{file = "onnxruntime-1.12.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef3e24a703fb4896bd0e360dfa4fadd6b2b57f64a05b040e01ab717c4e2d5a0c"},
|
555 |
+
{file = "onnxruntime-1.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92d28a7bd547290c0e47d60ca64c52b4976a9bd51622bd83be85bccce316f413"},
|
556 |
+
{file = "onnxruntime-1.12.1-cp38-cp38-manylinux_2_27_x86_64.whl", hash = "sha256:a5c4f5332083dd3815b78ddb16d4a0cf4907a59edd956bcfe53992b71b8feac1"},
|
557 |
+
{file = "onnxruntime-1.12.1-cp38-cp38-win32.whl", hash = "sha256:ff9da60be6c5800dcc10c52dd54aa07ab9a0d86c1e99649881bee9d9838031e0"},
|
558 |
+
{file = "onnxruntime-1.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:f0104e0e8327c8468d646941540af9397b737155dffe078da4bf36da95d1c21e"},
|
559 |
+
{file = "onnxruntime-1.12.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:64152aae1c6ffd74598775c775b86407df7c4aea01f418db672c0d9d86f641f6"},
|
560 |
+
{file = "onnxruntime-1.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8c7caab808df8fa323e1cfaced9785cd068d54701f3bf78ae8733e702a053ff4"},
|
561 |
+
{file = "onnxruntime-1.12.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d9578da310f324eb7fb4014458a50f53e2cbe1eaa98a5ac521675ad7158ca21"},
|
562 |
+
{file = "onnxruntime-1.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ee2f32e4427005c788ed0c081dc74846b7417600705610648cfe7062c2270e8"},
|
563 |
+
{file = "onnxruntime-1.12.1-cp39-cp39-manylinux_2_27_x86_64.whl", hash = "sha256:9c28b8c06df60f986693d35aecc33d9edd494db53ab7915bbe9830c20471d654"},
|
564 |
+
{file = "onnxruntime-1.12.1-cp39-cp39-win32.whl", hash = "sha256:a9954f6ffab4a0a3877a4800d817950a236a6db4901399eec1ea52033f52da94"},
|
565 |
+
{file = "onnxruntime-1.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:76bbd92cbcc5b6b0f893565f072e33f921ae3350a77b74fb7c65757e683516c7"},
|
566 |
+
]
|
567 |
+
packaging = [
|
568 |
+
{file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
|
569 |
+
{file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
|
570 |
+
]
|
571 |
+
Pillow = [
|
572 |
+
{file = "Pillow-9.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:a9c9bc489f8ab30906d7a85afac4b4944a572a7432e00698a7239f44a44e6efb"},
|
573 |
+
{file = "Pillow-9.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:510cef4a3f401c246cfd8227b300828715dd055463cdca6176c2e4036df8bd4f"},
|
574 |
+
{file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7888310f6214f19ab2b6df90f3f06afa3df7ef7355fc025e78a3044737fab1f5"},
|
575 |
+
{file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831e648102c82f152e14c1a0938689dbb22480c548c8d4b8b248b3e50967b88c"},
|
576 |
+
{file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cc1d2451e8a3b4bfdb9caf745b58e6c7a77d2e469159b0d527a4554d73694d1"},
|
577 |
+
{file = "Pillow-9.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:136659638f61a251e8ed3b331fc6ccd124590eeff539de57c5f80ef3a9594e58"},
|
578 |
+
{file = "Pillow-9.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6e8c66f70fb539301e064f6478d7453e820d8a2c631da948a23384865cd95544"},
|
579 |
+
{file = "Pillow-9.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:37ff6b522a26d0538b753f0b4e8e164fdada12db6c6f00f62145d732d8a3152e"},
|
580 |
+
{file = "Pillow-9.2.0-cp310-cp310-win32.whl", hash = "sha256:c79698d4cd9318d9481d89a77e2d3fcaeff5486be641e60a4b49f3d2ecca4e28"},
|
581 |
+
{file = "Pillow-9.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:254164c57bab4b459f14c64e93df11eff5ded575192c294a0c49270f22c5d93d"},
|
582 |
+
{file = "Pillow-9.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:adabc0bce035467fb537ef3e5e74f2847c8af217ee0be0455d4fec8adc0462fc"},
|
583 |
+
{file = "Pillow-9.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:336b9036127eab855beec9662ac3ea13a4544a523ae273cbf108b228ecac8437"},
|
584 |
+
{file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50dff9cc21826d2977ef2d2a205504034e3a4563ca6f5db739b0d1026658e004"},
|
585 |
+
{file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb6259196a589123d755380b65127ddc60f4c64b21fc3bb46ce3a6ea663659b0"},
|
586 |
+
{file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0554af24df2bf96618dac71ddada02420f946be943b181108cac55a7a2dcd4"},
|
587 |
+
{file = "Pillow-9.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:15928f824870535c85dbf949c09d6ae7d3d6ac2d6efec80f3227f73eefba741c"},
|
588 |
+
{file = "Pillow-9.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:bdd0de2d64688ecae88dd8935012c4a72681e5df632af903a1dca8c5e7aa871a"},
|
589 |
+
{file = "Pillow-9.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5b87da55a08acb586bad5c3aa3b86505f559b84f39035b233d5bf844b0834b1"},
|
590 |
+
{file = "Pillow-9.2.0-cp311-cp311-win32.whl", hash = "sha256:b6d5e92df2b77665e07ddb2e4dbd6d644b78e4c0d2e9272a852627cdba0d75cf"},
|
591 |
+
{file = "Pillow-9.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6bf088c1ce160f50ea40764f825ec9b72ed9da25346216b91361eef8ad1b8f8c"},
|
592 |
+
{file = "Pillow-9.2.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:2c58b24e3a63efd22554c676d81b0e57f80e0a7d3a5874a7e14ce90ec40d3069"},
|
593 |
+
{file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef7592281f7c174d3d6cbfbb7ee5984a671fcd77e3fc78e973d492e9bf0eb3f"},
|
594 |
+
{file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcd7b9c7139dc8258d164b55696ecd16c04607f1cc33ba7af86613881ffe4ac8"},
|
595 |
+
{file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a138441e95562b3c078746a22f8fca8ff1c22c014f856278bdbdd89ca36cff1b"},
|
596 |
+
{file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:93689632949aff41199090eff5474f3990b6823404e45d66a5d44304e9cdc467"},
|
597 |
+
{file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:f3fac744f9b540148fa7715a435d2283b71f68bfb6d4aae24482a890aed18b59"},
|
598 |
+
{file = "Pillow-9.2.0-cp37-cp37m-win32.whl", hash = "sha256:fa768eff5f9f958270b081bb33581b4b569faabf8774726b283edb06617101dc"},
|
599 |
+
{file = "Pillow-9.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:69bd1a15d7ba3694631e00df8de65a8cb031911ca11f44929c97fe05eb9b6c1d"},
|
600 |
+
{file = "Pillow-9.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:030e3460861488e249731c3e7ab59b07c7853838ff3b8e16aac9561bb345da14"},
|
601 |
+
{file = "Pillow-9.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:74a04183e6e64930b667d321524e3c5361094bb4af9083db5c301db64cd341f3"},
|
602 |
+
{file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d33a11f601213dcd5718109c09a52c2a1c893e7461f0be2d6febc2879ec2402"},
|
603 |
+
{file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fd6f5e3c0e4697fa7eb45b6e93996299f3feee73a3175fa451f49a74d092b9f"},
|
604 |
+
{file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a647c0d4478b995c5e54615a2e5360ccedd2f85e70ab57fbe817ca613d5e63b8"},
|
605 |
+
{file = "Pillow-9.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:4134d3f1ba5f15027ff5c04296f13328fecd46921424084516bdb1b2548e66ff"},
|
606 |
+
{file = "Pillow-9.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:bc431b065722a5ad1dfb4df354fb9333b7a582a5ee39a90e6ffff688d72f27a1"},
|
607 |
+
{file = "Pillow-9.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1536ad017a9f789430fb6b8be8bf99d2f214c76502becc196c6f2d9a75b01b76"},
|
608 |
+
{file = "Pillow-9.2.0-cp38-cp38-win32.whl", hash = "sha256:2ad0d4df0f5ef2247e27fc790d5c9b5a0af8ade9ba340db4a73bb1a4a3e5fb4f"},
|
609 |
+
{file = "Pillow-9.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:ec52c351b35ca269cb1f8069d610fc45c5bd38c3e91f9ab4cbbf0aebc136d9c8"},
|
610 |
+
{file = "Pillow-9.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ed2c4ef2451de908c90436d6e8092e13a43992f1860275b4d8082667fbb2ffc"},
|
611 |
+
{file = "Pillow-9.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ad2f835e0ad81d1689f1b7e3fbac7b01bb8777d5a985c8962bedee0cc6d43da"},
|
612 |
+
{file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea98f633d45f7e815db648fd7ff0f19e328302ac36427343e4432c84432e7ff4"},
|
613 |
+
{file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7761afe0126d046974a01e030ae7529ed0ca6a196de3ec6937c11df0df1bc91c"},
|
614 |
+
{file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a54614049a18a2d6fe156e68e188da02a046a4a93cf24f373bffd977e943421"},
|
615 |
+
{file = "Pillow-9.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:5aed7dde98403cd91d86a1115c78d8145c83078e864c1de1064f52e6feb61b20"},
|
616 |
+
{file = "Pillow-9.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:13b725463f32df1bfeacbf3dd197fb358ae8ebcd8c5548faa75126ea425ccb60"},
|
617 |
+
{file = "Pillow-9.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:808add66ea764ed97d44dda1ac4f2cfec4c1867d9efb16a33d158be79f32b8a4"},
|
618 |
+
{file = "Pillow-9.2.0-cp39-cp39-win32.whl", hash = "sha256:337a74fd2f291c607d220c793a8135273c4c2ab001b03e601c36766005f36885"},
|
619 |
+
{file = "Pillow-9.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:fac2d65901fb0fdf20363fbd345c01958a742f2dc62a8dd4495af66e3ff502a4"},
|
620 |
+
{file = "Pillow-9.2.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ad2277b185ebce47a63f4dc6302e30f05762b688f8dc3de55dbae4651872cdf3"},
|
621 |
+
{file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c7b502bc34f6e32ba022b4a209638f9e097d7a9098104ae420eb8186217ebbb"},
|
622 |
+
{file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d1f14f5f691f55e1b47f824ca4fdcb4b19b4323fe43cc7bb105988cad7496be"},
|
623 |
+
{file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:dfe4c1fedfde4e2fbc009d5ad420647f7730d719786388b7de0999bf32c0d9fd"},
|
624 |
+
{file = "Pillow-9.2.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:f07f1f00e22b231dd3d9b9208692042e29792d6bd4f6639415d2f23158a80013"},
|
625 |
+
{file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1802f34298f5ba11d55e5bb09c31997dc0c6aed919658dfdf0198a2fe75d5490"},
|
626 |
+
{file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17d4cafe22f050b46d983b71c707162d63d796a1235cdf8b9d7a112e97b15bac"},
|
627 |
+
{file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96b5e6874431df16aee0c1ba237574cb6dff1dcb173798faa6a9d8b399a05d0e"},
|
628 |
+
{file = "Pillow-9.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0030fdbd926fb85844b8b92e2f9449ba89607231d3dd597a21ae72dc7fe26927"},
|
629 |
+
{file = "Pillow-9.2.0.tar.gz", hash = "sha256:75e636fd3e0fb872693f23ccb8a5ff2cd578801251f3a4f6854c6a5d437d3c04"},
|
630 |
+
]
|
631 |
+
protobuf = [
|
632 |
+
{file = "protobuf-4.21.7-cp310-abi3-win32.whl", hash = "sha256:c7cb105d69a87416bd9023e64324e1c089593e6dae64d2536f06bcbe49cd97d8"},
|
633 |
+
{file = "protobuf-4.21.7-cp310-abi3-win_amd64.whl", hash = "sha256:3ec85328a35a16463c6f419dbce3c0fc42b3e904d966f17f48bae39597c7a543"},
|
634 |
+
{file = "protobuf-4.21.7-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:db9056b6a11cb5131036d734bcbf91ef3ef9235d6b681b2fc431cbfe5a7f2e56"},
|
635 |
+
{file = "protobuf-4.21.7-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:ca200645d6235ce0df3ccfdff1567acbab35c4db222a97357806e015f85b5744"},
|
636 |
+
{file = "protobuf-4.21.7-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:b019c79e23a80735cc8a71b95f76a49a262f579d6b84fd20a0b82279f40e2cc1"},
|
637 |
+
{file = "protobuf-4.21.7-cp37-cp37m-win32.whl", hash = "sha256:d3f89ccf7182293feba2de2739c8bf34fed1ed7c65a5cf987be00311acac57c1"},
|
638 |
+
{file = "protobuf-4.21.7-cp37-cp37m-win_amd64.whl", hash = "sha256:a74d96cd960b87b4b712797c741bb3ea3a913f5c2dc4b6cbe9c0f8360b75297d"},
|
639 |
+
{file = "protobuf-4.21.7-cp38-cp38-win32.whl", hash = "sha256:8e09d1916386eca1ef1353767b6efcebc0a6859ed7f73cb7fb974feba3184830"},
|
640 |
+
{file = "protobuf-4.21.7-cp38-cp38-win_amd64.whl", hash = "sha256:9e355f2a839d9930d83971b9f562395e13493f0e9211520f8913bd11efa53c02"},
|
641 |
+
{file = "protobuf-4.21.7-cp39-cp39-win32.whl", hash = "sha256:f370c0a71712f8965023dd5b13277444d3cdfecc96b2c778b0e19acbfd60df6e"},
|
642 |
+
{file = "protobuf-4.21.7-cp39-cp39-win_amd64.whl", hash = "sha256:9643684232b6b340b5e63bb69c9b4904cdd39e4303d498d1a92abddc7e895b7f"},
|
643 |
+
{file = "protobuf-4.21.7-py2.py3-none-any.whl", hash = "sha256:8066322588d4b499869bf9f665ebe448e793036b552f68c585a9b28f1e393f66"},
|
644 |
+
{file = "protobuf-4.21.7-py3-none-any.whl", hash = "sha256:58b81358ec6c0b5d50df761460ae2db58405c063fd415e1101209221a0a810e1"},
|
645 |
+
{file = "protobuf-4.21.7.tar.gz", hash = "sha256:71d9dba03ed3432c878a801e2ea51e034b0ea01cf3a4344fb60166cb5f6c8757"},
|
646 |
+
]
|
647 |
+
pyparsing = [
|
648 |
+
{file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"},
|
649 |
+
{file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"},
|
650 |
+
]
|
651 |
+
pyreadline3 = [
|
652 |
+
{file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"},
|
653 |
+
{file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"},
|
654 |
+
]
|
655 |
+
regex = [
|
656 |
+
{file = "regex-2022.9.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0394265391a86e2bbaa7606e59ac71bd9f1edf8665a59e42771a9c9adbf6fd4f"},
|
657 |
+
{file = "regex-2022.9.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:86df2049b18745f3cd4b0f4c4ef672bfac4b80ca488e6ecfd2bbfe68d2423a2c"},
|
658 |
+
{file = "regex-2022.9.13-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce331b076b2b013e7d7f07157f957974ef0b0881a808e8a4a4b3b5105aee5d04"},
|
659 |
+
{file = "regex-2022.9.13-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:360ffbc9357794ae41336b681dff1c0463193199dfb91fcad3ec385ea4972f46"},
|
660 |
+
{file = "regex-2022.9.13-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18e503b1e515a10282b3f14f1b3d856194ecece4250e850fad230842ed31227f"},
|
661 |
+
{file = "regex-2022.9.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e167d1ccd41d27b7b6655bb7a2dcb1b1eb1e0d2d662043470bd3b4315d8b2b"},
|
662 |
+
{file = "regex-2022.9.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4146cb7ae6029fc83b5c905ec6d806b7e5568dc14297c423e66b86294bad6c39"},
|
663 |
+
{file = "regex-2022.9.13-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a1aec4ae549fd7b3f52ceaf67e133010e2fba1538bf4d5fc5cd162a5e058d5df"},
|
664 |
+
{file = "regex-2022.9.13-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cab548d6d972e1de584161487b2ac1aa82edd8430d1bde69587ba61698ad1cfb"},
|
665 |
+
{file = "regex-2022.9.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3d64e1a7e6d98a4cdc8b29cb8d8ed38f73f49e55fbaa737bdb5933db99b9de22"},
|
666 |
+
{file = "regex-2022.9.13-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:67a4c625361db04ae40ef7c49d3cbe2c1f5ff10b5a4491327ab20f19f2fb5d40"},
|
667 |
+
{file = "regex-2022.9.13-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:5d0dd8b06896423211ce18fba0c75dacc49182a1d6514c004b535be7163dca0f"},
|
668 |
+
{file = "regex-2022.9.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4318f69b79f9f7d84a7420e97d4bfe872dc767c72f891d4fea5fa721c74685f7"},
|
669 |
+
{file = "regex-2022.9.13-cp310-cp310-win32.whl", hash = "sha256:26df88c9636a0c3f3bd9189dd435850a0c49d0b7d6e932500db3f99a6dd604d1"},
|
670 |
+
{file = "regex-2022.9.13-cp310-cp310-win_amd64.whl", hash = "sha256:6fe1dd1021e0f8f3f454ce2811f1b0b148f2d25bb38c712fec00316551e93650"},
|
671 |
+
{file = "regex-2022.9.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:83cc32a1a2fa5bac00f4abc0e6ce142e3c05d3a6d57e23bd0f187c59b4e1e43b"},
|
672 |
+
{file = "regex-2022.9.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2effeaf50a6838f3dd4d3c5d265f06eabc748f476e8441892645ae3a697e273"},
|
673 |
+
{file = "regex-2022.9.13-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59a786a55d00439d8fae4caaf71581f2aaef7297d04ee60345c3594efef5648a"},
|
674 |
+
{file = "regex-2022.9.13-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b701dbc124558fd2b1b08005eeca6c9160e209108fbcbd00091fcfac641ac7"},
|
675 |
+
{file = "regex-2022.9.13-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dab81cc4d58026861445230cfba27f9825e9223557926e7ec22156a1a140d55c"},
|
676 |
+
{file = "regex-2022.9.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0c5cc3d1744a67c3b433dce91e5ef7c527d612354c1f1e8576d9e86bc5c5e2"},
|
677 |
+
{file = "regex-2022.9.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:518272f25da93e02af4f1e94985f5042cec21557ef3591027d0716f2adda5d0a"},
|
678 |
+
{file = "regex-2022.9.13-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8418ee2cb857b83881b8f981e4c636bc50a0587b12d98cb9b947408a3c484fe7"},
|
679 |
+
{file = "regex-2022.9.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cfa4c956ff0a977c4823cb3b930b0a4e82543b060733628fec7ab3eb9b1abe37"},
|
680 |
+
{file = "regex-2022.9.13-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a1c4d17879dd4c4432c08a1ca1ab379f12ab54af569e945b6fc1c4cf6a74ca45"},
|
681 |
+
{file = "regex-2022.9.13-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:77c2879d3ba51e5ca6c2b47f2dcf3d04a976a623a8fc8236010a16c9e0b0a3c7"},
|
682 |
+
{file = "regex-2022.9.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2885ec6eea629c648ecc9bde0837ec6b92208b7f36381689937fe5d64a517e8"},
|
683 |
+
{file = "regex-2022.9.13-cp311-cp311-win32.whl", hash = "sha256:2dda4b096a6f630d6531728a45bd12c67ec3badf44342046dc77d4897277d4f2"},
|
684 |
+
{file = "regex-2022.9.13-cp311-cp311-win_amd64.whl", hash = "sha256:592b9e2e1862168e71d9e612bfdc22c451261967dbd46681f14e76dfba7105fd"},
|
685 |
+
{file = "regex-2022.9.13-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:df8fe00b60e4717662c7f80c810ba66dcc77309183c76b7754c0dff6f1d42054"},
|
686 |
+
{file = "regex-2022.9.13-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:995e70bb8c91d1b99ed2aaf8ec44863e06ad1dfbb45d7df95f76ef583ec323a9"},
|
687 |
+
{file = "regex-2022.9.13-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad75173349ad79f9d21e0d0896b27dcb37bfd233b09047bc0b4d226699cf5c87"},
|
688 |
+
{file = "regex-2022.9.13-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7681c49da1a2d4b905b4f53d86c9ba4506e79fba50c4a664d9516056e0f7dfcc"},
|
689 |
+
{file = "regex-2022.9.13-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bc8edc5f8ef0ebb46f3fa0d02bd825bbe9cc63d59e428ffb6981ff9672f6de1"},
|
690 |
+
{file = "regex-2022.9.13-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bee775ff05c9d519195bd9e8aaaccfe3971db60f89f89751ee0f234e8aeac5"},
|
691 |
+
{file = "regex-2022.9.13-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1a901ce5cd42658ab8f8eade51b71a6d26ad4b68c7cfc86b87efc577dfa95602"},
|
692 |
+
{file = "regex-2022.9.13-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:14a7ab070fa3aec288076eed6ed828587b805ef83d37c9bfccc1a4a7cfbd8111"},
|
693 |
+
{file = "regex-2022.9.13-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d23ac6b4bf9e32fcde5fcdb2e1fd5e7370d6693fcac51ee1d340f0e886f50d1f"},
|
694 |
+
{file = "regex-2022.9.13-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:4cdbfa6d2befeaee0c899f19222e9b20fc5abbafe5e9c43a46ef819aeb7b75e5"},
|
695 |
+
{file = "regex-2022.9.13-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ab07934725e6f25c6f87465976cc69aef1141e86987af49d8c839c3ffd367c72"},
|
696 |
+
{file = "regex-2022.9.13-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d2a1371dc73e921f3c2e087c05359050f3525a9a34b476ebc8130e71bec55e97"},
|
697 |
+
{file = "regex-2022.9.13-cp36-cp36m-win32.whl", hash = "sha256:fcbd1edff1473d90dc5cf4b52d355cf1f47b74eb7c85ba6e45f45d0116b8edbd"},
|
698 |
+
{file = "regex-2022.9.13-cp36-cp36m-win_amd64.whl", hash = "sha256:fe428822b7a8c486bcd90b334e9ab541ce6cc0d6106993d59f201853e5e14121"},
|
699 |
+
{file = "regex-2022.9.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d7430f041755801b712ec804aaf3b094b9b5facbaa93a6339812a8e00d7bd53a"},
|
700 |
+
{file = "regex-2022.9.13-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:079c182f99c89524069b9cd96f5410d6af437e9dca576a7d59599a574972707e"},
|
701 |
+
{file = "regex-2022.9.13-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59bac44b5a07b08a261537f652c26993af9b1bbe2a29624473968dd42fc29d56"},
|
702 |
+
{file = "regex-2022.9.13-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a59d0377e58d96a6f11636e97992f5b51b7e1e89eb66332d1c01b35adbabfe8a"},
|
703 |
+
{file = "regex-2022.9.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9d68eb704b24bc4d441b24e4a12653acd07d2c39940548761e0985a08bc1fff"},
|
704 |
+
{file = "regex-2022.9.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0385d66e73cdd4462f3cc42c76a6576ddcc12472c30e02a2ae82061bff132c32"},
|
705 |
+
{file = "regex-2022.9.13-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:db45016364eec9ddbb5af93c8740c5c92eb7f5fc8848d1ae04205a40a1a2efc6"},
|
706 |
+
{file = "regex-2022.9.13-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:03ff695518482b946a6d3d4ce9cbbd99a21320e20d94913080aa3841f880abcd"},
|
707 |
+
{file = "regex-2022.9.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6b32b45433df1fad7fed738fe15200b6516da888e0bd1fdd6aa5e50cc16b76bc"},
|
708 |
+
{file = "regex-2022.9.13-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:003a2e1449d425afc817b5f0b3d4c4aa9072dd5f3dfbf6c7631b8dc7b13233de"},
|
709 |
+
{file = "regex-2022.9.13-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:a9eb9558e1d0f78e07082d8a70d5c4d631c8dd75575fae92105df9e19c736730"},
|
710 |
+
{file = "regex-2022.9.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f6e0321921d2fdc082ef90c1fd0870f129c2e691bfdc4937dcb5cd308aba95c4"},
|
711 |
+
{file = "regex-2022.9.13-cp37-cp37m-win32.whl", hash = "sha256:3f3b4594d564ed0b2f54463a9f328cf6a5b2a32610a90cdff778d6e3e561d08b"},
|
712 |
+
{file = "regex-2022.9.13-cp37-cp37m-win_amd64.whl", hash = "sha256:8aba0d01e3dfd335f2cb107079b07fdddb4cd7fb2d8c8a1986f9cb8ce9246c24"},
|
713 |
+
{file = "regex-2022.9.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:944567bb08f52268d8600ee5bdf1798b2b62ea002cc692a39cec113244cbdd0d"},
|
714 |
+
{file = "regex-2022.9.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0b664a4d33ffc6be10996606dfc25fd3248c24cc589c0b139feb4c158053565e"},
|
715 |
+
{file = "regex-2022.9.13-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f06cc1190f3db3192ab8949e28f2c627e1809487e2cfc435b6524c1ce6a2f391"},
|
716 |
+
{file = "regex-2022.9.13-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c57d50d4d5eb0c862569ca3c840eba2a73412f31d9ecc46ef0d6b2e621a592b"},
|
717 |
+
{file = "regex-2022.9.13-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19a4da6f513045f5ba00e491215bd00122e5bd131847586522463e5a6b2bd65f"},
|
718 |
+
{file = "regex-2022.9.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a926339356fe29595f8e37af71db37cd87ff764e15da8ad5129bbaff35bcc5a6"},
|
719 |
+
{file = "regex-2022.9.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:091efcfdd4178a7e19a23776dc2b1fafb4f57f4d94daf340f98335817056f874"},
|
720 |
+
{file = "regex-2022.9.13-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:880dbeb6bdde7d926b4d8e41410b16ffcd4cb3b4c6d926280fea46e2615c7a01"},
|
721 |
+
{file = "regex-2022.9.13-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:73b985c9fc09a7896846e26d7b6f4d1fd5a20437055f4ef985d44729f9f928d0"},
|
722 |
+
{file = "regex-2022.9.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c0b7cb9598795b01f9a3dd3f770ab540889259def28a3bf9b2fa24d52edecba3"},
|
723 |
+
{file = "regex-2022.9.13-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:37e5a26e76c46f54b3baf56a6fdd56df9db89758694516413757b7d127d4c57b"},
|
724 |
+
{file = "regex-2022.9.13-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:99945ddb4f379bb9831c05e9f80f02f079ba361a0fb1fba1fc3b267639b6bb2e"},
|
725 |
+
{file = "regex-2022.9.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dcbcc9e72a791f622a32d17ff5011326a18996647509cac0609a7fc43adc229"},
|
726 |
+
{file = "regex-2022.9.13-cp38-cp38-win32.whl", hash = "sha256:d3102ab9bf16bf541ca228012d45d88d2a567c9682a805ae2c145a79d3141fdd"},
|
727 |
+
{file = "regex-2022.9.13-cp38-cp38-win_amd64.whl", hash = "sha256:14216ea15efc13f28d0ef1c463d86d93ca7158a79cd4aec0f9273f6d4c6bb047"},
|
728 |
+
{file = "regex-2022.9.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9a165a05979e212b2c2d56a9f40b69c811c98a788964e669eb322de0a3e420b4"},
|
729 |
+
{file = "regex-2022.9.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:14c71437ffb89479c89cc7022a5ea2075a842b728f37205e47c824cc17b30a42"},
|
730 |
+
{file = "regex-2022.9.13-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee7045623a5ace70f3765e452528b4c1f2ce669ed31959c63f54de64fe2f6ff7"},
|
731 |
+
{file = "regex-2022.9.13-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6e521d9db006c5e4a0f8acfef738399f72b704913d4e083516774eb51645ad7c"},
|
732 |
+
{file = "regex-2022.9.13-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b86548b8234b2be3985dbc0b385e35f5038f0f3e6251464b827b83ebf4ed90e5"},
|
733 |
+
{file = "regex-2022.9.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b39ee3b280e15824298b97cec3f7cbbe6539d8282cc8a6047a455b9a72c598"},
|
734 |
+
{file = "regex-2022.9.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6e6e61e9a38b6cc60ca3e19caabc90261f070f23352e66307b3d21a24a34aaf"},
|
735 |
+
{file = "regex-2022.9.13-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d837ccf3bd2474feabee96cd71144e991472e400ed26582edc8ca88ce259899c"},
|
736 |
+
{file = "regex-2022.9.13-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6adfe300848d61a470ec7547adc97b0ccf86de86a99e6830f1d8c8d19ecaf6b3"},
|
737 |
+
{file = "regex-2022.9.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d5b003d248e6f292475cd24b04e5f72c48412231961a675edcb653c70730e79e"},
|
738 |
+
{file = "regex-2022.9.13-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:d5edd3eb877c9fc2e385173d4a4e1d792bf692d79e25c1ca391802d36ecfaa01"},
|
739 |
+
{file = "regex-2022.9.13-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:50e764ffbd08b06aa8c4e86b8b568b6722c75d301b33b259099f237c46b2134e"},
|
740 |
+
{file = "regex-2022.9.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6d43bd402b27e0e7eae85c612725ba1ce7798f20f6fab4e8bc3de4f263294f03"},
|
741 |
+
{file = "regex-2022.9.13-cp39-cp39-win32.whl", hash = "sha256:7fcf7f94ccad19186820ac67e2ec7e09e0ac2dac39689f11cf71eac580503296"},
|
742 |
+
{file = "regex-2022.9.13-cp39-cp39-win_amd64.whl", hash = "sha256:322bd5572bed36a5b39952d88e072738926759422498a96df138d93384934ff8"},
|
743 |
+
{file = "regex-2022.9.13.tar.gz", hash = "sha256:f07373b6e56a6f3a0df3d75b651a278ca7bd357a796078a26a958ea1ce0588fd"},
|
744 |
+
]
|
745 |
+
requests = [
|
746 |
+
{file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"},
|
747 |
+
{file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"},
|
748 |
+
]
|
749 |
+
scikit-learn = [
|
750 |
+
{file = "scikit-learn-1.1.1.tar.gz", hash = "sha256:3e77b71e8e644f86c8b5be7f1c285ef597de4c384961389ee3e9ca36c445b256"},
|
751 |
+
{file = "scikit_learn-1.1.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:102f51797cd8944bf44a038d106848ddf2804f2c1edf7aea45fba81a4fdc4d80"},
|
752 |
+
{file = "scikit_learn-1.1.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:723cdb278b1fa57a55f68945bc4e501a2f12abe82f76e8d21e1806cbdbef6fc5"},
|
753 |
+
{file = "scikit_learn-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33cf061ed0b79d647a3e4c3f6c52c412172836718a7cd4d11c1318d083300133"},
|
754 |
+
{file = "scikit_learn-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47464c110eaa9ed9d1fe108cb403510878c3d3a40f110618d2a19b2190a3e35c"},
|
755 |
+
{file = "scikit_learn-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:542ccd2592fe7ad31f5c85fed3a3deb3e252383960a85e4b49a629353fffaba4"},
|
756 |
+
{file = "scikit_learn-1.1.1-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:3be10d8d325821ca366d4fe7083d87c40768f842f54371a9c908d97c45da16fc"},
|
757 |
+
{file = "scikit_learn-1.1.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:b2db720e13e697d912a87c1a51194e6fb085dc6d8323caa5ca51369ca6948f78"},
|
758 |
+
{file = "scikit_learn-1.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e851f8874398dcd50d1e174e810e9331563d189356e945b3271c0e19ee6f4d6f"},
|
759 |
+
{file = "scikit_learn-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b928869072366dc138762fe0929e7dc88413f8a469aebc6a64adc10a9226180c"},
|
760 |
+
{file = "scikit_learn-1.1.1-cp38-cp38-win32.whl", hash = "sha256:e9d228ced1214d67904f26fb820c8abbea12b2889cd4aa8cda20a4ca0ed781c1"},
|
761 |
+
{file = "scikit_learn-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:f2d5b5d6e87d482e17696a7bfa03fe9515fdfe27e462a4ad37f3d7774a5e2fd6"},
|
762 |
+
{file = "scikit_learn-1.1.1-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:0403ad13f283e27d43b0ad875f187ec7f5d964903d92d1ed06c51439560ecea0"},
|
763 |
+
{file = "scikit_learn-1.1.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:8fe80df08f5b9cee5dd008eccc672e543976198d790c07e5337f7dfb67eaac05"},
|
764 |
+
{file = "scikit_learn-1.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ff56d07b9507fbe07ca0f4e5c8f3e171f74a429f998da03e308166251316b34"},
|
765 |
+
{file = "scikit_learn-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2dad2bfc502344b869d4a3f4aa7271b2a5f4fe41f7328f404844c51612e2c58"},
|
766 |
+
{file = "scikit_learn-1.1.1-cp39-cp39-win32.whl", hash = "sha256:22145b60fef02e597a8e7f061ebc7c51739215f11ce7fcd2ca9af22c31aa9f86"},
|
767 |
+
{file = "scikit_learn-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:45c0f6ae523353f1d99b85469d746f9c497410adff5ba8b24423705b6956a86e"},
|
768 |
+
]
|
769 |
+
scipy = [
|
770 |
+
{file = "scipy-1.6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a15a1f3fc0abff33e792d6049161b7795909b40b97c6cc2934ed54384017ab76"},
|
771 |
+
{file = "scipy-1.6.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:e79570979ccdc3d165456dd62041d9556fb9733b86b4b6d818af7a0afc15f092"},
|
772 |
+
{file = "scipy-1.6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:a423533c55fec61456dedee7b6ee7dce0bb6bfa395424ea374d25afa262be261"},
|
773 |
+
{file = "scipy-1.6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:33d6b7df40d197bdd3049d64e8e680227151673465e5d85723b3b8f6b15a6ced"},
|
774 |
+
{file = "scipy-1.6.1-cp37-cp37m-win32.whl", hash = "sha256:6725e3fbb47da428794f243864f2297462e9ee448297c93ed1dcbc44335feb78"},
|
775 |
+
{file = "scipy-1.6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:5fa9c6530b1661f1370bcd332a1e62ca7881785cc0f80c0d559b636567fab63c"},
|
776 |
+
{file = "scipy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bd50daf727f7c195e26f27467c85ce653d41df4358a25b32434a50d8870fc519"},
|
777 |
+
{file = "scipy-1.6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:f46dd15335e8a320b0fb4685f58b7471702234cba8bb3442b69a3e1dc329c345"},
|
778 |
+
{file = "scipy-1.6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:0e5b0ccf63155d90da576edd2768b66fb276446c371b73841e3503be1d63fb5d"},
|
779 |
+
{file = "scipy-1.6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:2481efbb3740977e3c831edfd0bd9867be26387cacf24eb5e366a6a374d3d00d"},
|
780 |
+
{file = "scipy-1.6.1-cp38-cp38-win32.whl", hash = "sha256:68cb4c424112cd4be886b4d979c5497fba190714085f46b8ae67a5e4416c32b4"},
|
781 |
+
{file = "scipy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:5f331eeed0297232d2e6eea51b54e8278ed8bb10b099f69c44e2558c090d06bf"},
|
782 |
+
{file = "scipy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0c8a51d33556bf70367452d4d601d1742c0e806cd0194785914daf19775f0e67"},
|
783 |
+
{file = "scipy-1.6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:83bf7c16245c15bc58ee76c5418e46ea1811edcc2e2b03041b804e46084ab627"},
|
784 |
+
{file = "scipy-1.6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:794e768cc5f779736593046c9714e0f3a5940bc6dcc1dba885ad64cbfb28e9f0"},
|
785 |
+
{file = "scipy-1.6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5da5471aed911fe7e52b86bf9ea32fb55ae93e2f0fac66c32e58897cfb02fa07"},
|
786 |
+
{file = "scipy-1.6.1-cp39-cp39-win32.whl", hash = "sha256:8e403a337749ed40af60e537cc4d4c03febddcc56cd26e774c9b1b600a70d3e4"},
|
787 |
+
{file = "scipy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a5193a098ae9f29af283dcf0041f762601faf2e595c0db1da929875b7570353f"},
|
788 |
+
{file = "scipy-1.6.1.tar.gz", hash = "sha256:c4fceb864890b6168e79b0e714c585dbe2fd4222768ee90bc1aa0f8218691b11"},
|
789 |
+
]
|
790 |
+
skl2onnx = [
|
791 |
+
{file = "skl2onnx-1.13-py2.py3-none-any.whl", hash = "sha256:51011c52d445ecef71967c67522ca7d1a57fc15576556beefeef40895b960830"},
|
792 |
+
{file = "skl2onnx-1.13.tar.gz", hash = "sha256:5f352f6b9b855ffac6305a707f02c7d436f4368938ee9049092a95a3565c273d"},
|
793 |
+
]
|
794 |
+
sympy = [
|
795 |
+
{file = "sympy-1.11.1-py3-none-any.whl", hash = "sha256:938f984ee2b1e8eae8a07b884c8b7a1146010040fccddc6539c54f401c8f6fcf"},
|
796 |
+
{file = "sympy-1.11.1.tar.gz", hash = "sha256:e32380dce63cb7c0108ed525570092fd45168bdae2faa17e528221ef72e88658"},
|
797 |
+
]
|
798 |
+
tabulate = [
|
799 |
+
{file = "tabulate-0.8.10-py3-none-any.whl", hash = "sha256:0ba055423dbaa164b9e456abe7920c5e8ed33fcc16f6d1b2f2d152c8e1e8b4fc"},
|
800 |
+
{file = "tabulate-0.8.10.tar.gz", hash = "sha256:6c57f3f3dd7ac2782770155f3adb2db0b1a269637e42f27599925e64b114f519"},
|
801 |
+
]
|
802 |
+
threadpoolctl = [
|
803 |
+
{file = "threadpoolctl-3.1.0-py3-none-any.whl", hash = "sha256:8b99adda265feb6773280df41eece7b2e6561b772d21ffd52e372f999024907b"},
|
804 |
+
{file = "threadpoolctl-3.1.0.tar.gz", hash = "sha256:a335baacfaa4400ae1f0d8e3a58d6674d2f8828e3716bb2802c44955ad391380"},
|
805 |
+
]
|
806 |
+
torch = [
|
807 |
+
{file = "torch-1.12.1+cu116-cp310-cp310-linux_x86_64.whl", hash = "sha256:b6bc31244aa2818929fbb30c483c221df471e9d856e805c5a1ff72b131ae9e7b"},
|
808 |
+
{file = "torch-1.12.1+cu116-cp310-cp310-win_amd64.whl", hash = "sha256:832effad8b21109700323a5aa137a2e4bdea711dac3d8491ff542f798dab0101"},
|
809 |
+
{file = "torch-1.12.1+cu116-cp37-cp37m-linux_x86_64.whl", hash = "sha256:fc9b4786ec54be67eaa8b0c7c9999e2f4ae2b89a1c18e41de1515a190440c691"},
|
810 |
+
{file = "torch-1.12.1+cu116-cp37-cp37m-win_amd64.whl", hash = "sha256:bca5a77071d7eb901beb775648b125e6d9279f231d1f23e56530b5a189df8975"},
|
811 |
+
{file = "torch-1.12.1+cu116-cp38-cp38-linux_x86_64.whl", hash = "sha256:dda312901220895087cc83d3665464a3dc171d04460c61c31af463efbfb54896"},
|
812 |
+
{file = "torch-1.12.1+cu116-cp38-cp38-win_amd64.whl", hash = "sha256:b8e8906e770bcad12e67c269e1bcdd7661a8abd96519a4ba643e86440bbcc1bf"},
|
813 |
+
{file = "torch-1.12.1+cu116-cp39-cp39-linux_x86_64.whl", hash = "sha256:7725420dabebfcaf44984edce3283eea91f98f0f7d5874bc68c7a164bd8126e3"},
|
814 |
+
{file = "torch-1.12.1+cu116-cp39-cp39-win_amd64.whl", hash = "sha256:84f031e4ee25d95368d7531aa58e79da9808d3fa53b4b363ea03a2450b6fd0af"},
|
815 |
+
]
|
816 |
+
torchvision = [
|
817 |
+
{file = "torchvision-0.13.1+cu116-cp310-cp310-linux_x86_64.whl", hash = "sha256:0c9a2b605ac30fcf475d60f79ba378af0073a22de585453f8c3dd6c1452ab9bc"},
|
818 |
+
{file = "torchvision-0.13.1+cu116-cp310-cp310-win_amd64.whl", hash = "sha256:ba8b7d3c33f63feb29c7dd8c0db68b735d0c9d924ff4e84121b4b20b17cec7a5"},
|
819 |
+
{file = "torchvision-0.13.1+cu116-cp37-cp37m-linux_x86_64.whl", hash = "sha256:dcf32f6d998493e76ec21a38bbb856b7402295cf7a67fb09ce5bde7e7e725756"},
|
820 |
+
{file = "torchvision-0.13.1+cu116-cp37-cp37m-win_amd64.whl", hash = "sha256:9ec5654c56a22fe420dc0af0ff5cd31105f583fdb0240043ff26a7cfed7e05fb"},
|
821 |
+
{file = "torchvision-0.13.1+cu116-cp38-cp38-linux_x86_64.whl", hash = "sha256:c3ceb2b3f456f0c984af71ef55f8637f178a29dc3e13a66fbb010ceead2891e1"},
|
822 |
+
{file = "torchvision-0.13.1+cu116-cp38-cp38-win_amd64.whl", hash = "sha256:8a4c395bb72cf51eb4318c6861c9a5ea490d48ec36a3d767220ef182445449cb"},
|
823 |
+
{file = "torchvision-0.13.1+cu116-cp39-cp39-linux_x86_64.whl", hash = "sha256:75986abe572138258eb9795cb4cd73f40b2bdf8374fefa1af6ff6bb0dbc972c6"},
|
824 |
+
{file = "torchvision-0.13.1+cu116-cp39-cp39-win_amd64.whl", hash = "sha256:92e4685c6010b6b1c228ebb5fe93105d0a71e5b586483a942e04529a43e0bb42"},
|
825 |
+
]
|
826 |
+
tqdm = [
|
827 |
+
{file = "tqdm-4.64.1-py2.py3-none-any.whl", hash = "sha256:6fee160d6ffcd1b1c68c65f14c829c22832bc401726335ce92c52d395944a6a1"},
|
828 |
+
{file = "tqdm-4.64.1.tar.gz", hash = "sha256:5f4f682a004951c1b450bc753c710e9280c5746ce6ffedee253ddbcbf54cf1e4"},
|
829 |
+
]
|
830 |
+
typing-extensions = [
|
831 |
+
{file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"},
|
832 |
+
{file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"},
|
833 |
+
]
|
834 |
+
urllib3 = [
|
835 |
+
{file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"},
|
836 |
+
{file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"},
|
837 |
+
]
|
838 |
+
wcwidth = [
|
839 |
+
{file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"},
|
840 |
+
{file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"},
|
841 |
+
]
poetry.toml
ADDED
@@ -0,0 +1,2 @@
+[virtualenvs]
+in-project = true
pyproject.toml
ADDED
@@ -0,0 +1,32 @@
+[tool.poetry]
+name = "clip-variants"
+version = "0.1.0"
+description = ""
+authors = ["Miha Lunar <mlunar@gmail.com>"]
+readme = "README.md"
+packages = [{include = "clip_variants"}]
+
+[tool.poetry.dependencies]
+python = "^3.9"
+onnxmltools = "^1.11.1"
+packaging = "^21.3"
+torch = "^1.12.1"
+clip = {git = "https://github.com/openai/CLIP.git"}
+torchvision = "^0.13.1"
+ftfy = "^6.1.1"
+regex = "^2022.9.13"
+tqdm = "^4.64.1"
+onnxruntime = "^1.12.1"
+onnxconverter-common = "^1.12.2"
+tabulate = "^0.8.10"
+numpy = "^1.23.3"
+Pillow = "^9.2.0"
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
+
+[[tool.poetry.source]]
+name = "torch"
+url = "https://download.pytorch.org/whl/cu116"
+secondary = true
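The project above pins PyTorch 1.12.1 and torchvision 0.13.1 from the CUDA 11.6 wheel index (the secondary `torch` source), and poetry.toml's `in-project = true` keeps the virtualenv inside the project directory. After `poetry install`, a quick check like the hypothetical sketch below can confirm that the environment resolved to the intended builds before running the conversion scripts; it is not part of this repository, just an illustrative sanity check.

```python
# Hypothetical environment check (not part of this repository): verifies that the
# CUDA 11.6 torch build and the ONNX tooling pinned in poetry.lock are importable.
import onnx
import onnxruntime
import torch

print("torch:", torch.__version__)          # expected to report 1.12.1+cu116
print("cuda available:", torch.cuda.is_available())
print("onnx:", onnx.__version__)            # 1.11.x according to poetry.lock
print("onnxruntime:", onnxruntime.__version__, onnxruntime.get_available_providers())
```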
variants.py
ADDED
@@ -0,0 +1,108 @@
+import onnx
+import os
+import itertools
+import argparse
+from onnxconverter_common.float16 import convert_float_to_float16
+from onnxruntime.quantization import quantize_dynamic, QuantType
+from multiprocessing import Pool
+from tabulate import tabulate
+
+def float16(input, output):
+    model = onnx.load(input)
+    model_f16 = convert_float_to_float16(model)
+    onnx.save(model_f16, output)
+
+def qint8(input, output):
+    quantize_dynamic(input, output, weight_type=QuantType.QInt8)
+
+def quint8(input, output):
+    quantize_dynamic(input, output, weight_type=QuantType.QUInt8)
+
+def print_table(table):
+    print(tabulate(table, headers="keys", tablefmt="github"), "\n")
+
+def get_file_mb(path):
+    try:
+        stat = os.stat(path)
+    except FileNotFoundError:
+        return "N/A"
+    mb = round(stat.st_size / 1_000_000)
+    return f"{mb}"
+
+def convert(name, mode, f, markdown):
+    fname = f.__name__
+    input = f"models/clip-{name}-{mode}.onnx"
+    output = f"models/clip-{name}-{mode}-{fname}.onnx"
+    exists = os.path.exists(output)
+    if exists:
+        if not markdown:
+            print(f"{output} exists")
+    else:
+        if not markdown:
+            print(f"{output} converting")
+        f(input, output)
+        if not markdown:
+            print(f"{output} done")
+    return [input, output, name, mode, fname, "✅" if exists else "❌"]
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='Create variants of converted models')
+    parser.add_argument(
+        '--markdown',
+        action='store_true',
+        help='Print markdown tables describing the variants'
+    )
+    args = parser.parse_args()
+    names = [
+        "resnet-50",
+        "resnet-101",
+        "resnet-50x4",
+        "resnet-50x16",
+        "resnet-50x64",
+        "resnet-50",
+        "resnet-50",
+        "resnet-50",
+        "vit-base-patch16",
+        "vit-base-patch32",
+        "vit-large-patch14",
+        "vit-large-patch14-336",
+    ]
+    modes = [
+        "visual",
+        "textual"
+    ]
+    funcs = [
+        float16,
+        qint8,
+        quint8,
+    ]
+    markdown = args.markdown
+    if markdown:
+        print_table({ "Model ID": names })
+        print_table({ "Mode": modes })
+        print_table({ "Data Type": [f.__name__ for f in funcs] })
+    variants = itertools.product(names, modes, funcs, [markdown])
+
+    with Pool(8 if not markdown else 1) as p:
+        variants_table = p.starmap(convert, variants)
+        if markdown:
+            # Insert rows for the original models
+            prev_input = ""
+            variants_table_with_originals = []
+            for row in variants_table:
+                input = row[0]
+                output = row[1]
+                if input != prev_input:
+                    prev_input = input
+                    variants_table_with_originals.append(
+                        row[0:1] + row[2:4] + ["float32 (original)", "✅", get_file_mb(input)]
+                    )
+                file_size = get_file_mb(output)
+                variants_table_with_originals.append(row[1:] + [file_size])
+            # Add header
+            variants_table_with_originals.insert(0, ["Path", "Model ID", "Mode", "Data Type", "Available", "Size (MB)"])
+            # Print
+            print(tabulate(variants_table_with_originals, headers="firstrow", tablefmt="github"))
+        else:
+            print("done")
+
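variants.py only writes the converted float16 and quantized files; it does not load them again. A minimal smoke test for one of the generated variants is to open it with onnxruntime and feed a dummy batch, as in the hypothetical sketch below. The model path follows the f"models/clip-{name}-{mode}-{fname}.onnx" naming used in `convert()` above, but the input name, shape, and dtype are read from the model itself because they depend on how the original ONNX files were exported; float16 variants typically expect float16 inputs unless the converter was asked to keep float32 I/O.

```python
# Hypothetical smoke test for one converted variant (not part of this repository).
import numpy as np
import onnxruntime as ort

# Assumed output path, following the naming scheme used by variants.py.
path = "models/clip-vit-base-patch32-visual-float16.onnx"
session = ort.InferenceSession(path, providers=["CPUExecutionProvider"])

inp = session.get_inputs()[0]
dtype = np.float16 if inp.type == "tensor(float16)" else np.float32
shape = [d if isinstance(d, int) else 1 for d in inp.shape]  # substitute dynamic axes with 1

dummy = np.random.rand(*shape).astype(dtype)
outputs = session.run(None, {inp.name: dummy})
print(inp.name, list(inp.shape), "->", [o.shape for o in outputs])
```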