dataset: blender                               # NeRF synthetic (Blender) scenes
batching: single_image                         # draw each ray batch from a single training image
factor: 0                                      # image downsampling factor (0 = full resolution)
num_coarse_samples: 64                         # samples per ray for the coarse network
num_fine_samples: 128                          # additional samples per ray for the fine network
use_viewdirs: true                             # condition radiance on viewing direction
white_bkgd: true                               # composite renders onto a white background
batch_size: 1024                               # rays per optimization step
randomized: true                               # stratified (jittered) sampling along each ray
max_steps: 200000                              # total training iterations
print_every: 100                               # log metrics every N steps
render_every: 500                              # render a preview view every N steps
save_every: 5000                               # write a checkpoint every N steps
use_semantic_loss: true                        # enable the CLIP-based semantic consistency loss
clip_model_name: openai/clip-vit-base-patch32  # Hugging Face identifier of the CLIP model
clip_output_dtype: float16                     # precision of the CLIP embeddings
sc_loss_every: 16                              # apply the semantic consistency loss every N steps
few_shot: 8                                    # number of training views
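
A minimal sketch of how a config like this might be read in Python. The load_config helper and the config path below are assumptions for illustration, not the repository's actual loader (which may use flags or a different config system):

    # Hypothetical loader sketch: parse the YAML file and expose keys as attributes.
    import yaml
    from types import SimpleNamespace

    def load_config(path: str) -> SimpleNamespace:
        """Read a YAML config and return its keys as attributes."""
        with open(path, "r") as f:
            return SimpleNamespace(**yaml.safe_load(f))

    # Example usage (path is hypothetical):
    cfg = load_config("configs/blender_few_shot.yaml")
    print(cfg.few_shot, cfg.num_coarse_samples, cfg.clip_model_name)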