BTGFM committed on
Commit
763ca89
1 Parent(s): e1b75a3

Training in progress, step 20

adapter_config.json CHANGED
@@ -10,20 +10,20 @@
   "layers_pattern": null,
   "layers_to_transform": null,
   "loftq_config": {},
- "lora_alpha": 600,
- "lora_dropout": 0.0,
+ "lora_alpha": 512,
+ "lora_dropout": 0.1,
   "megatron_config": null,
   "megatron_core": "megatron.core",
   "modules_to_save": null,
   "peft_type": "LORA",
- "r": 300,
+ "r": 256,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
- "gate_up_proj",
   "down_proj",
   "o_proj",
- "qkv_proj"
+ "qkv_proj",
+ "gate_up_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
- oid sha256:c53f6184a9f2126abb1e954a6762551fbe94382e006be48b1893ce4018e65e80
- size 1887472264
+ oid sha256:10355443d3a37dc3b37fd9ea4f74765b0e5f185105695beabb5fd88e088c1c9c
+ size 1610648152
emissions.csv ADDED
@@ -0,0 +1,2 @@
+ timestamp,project_name,run_id,duration,emissions,emissions_rate,cpu_power,gpu_power,ram_power,cpu_energy,gpu_energy,ram_energy,energy_consumed,country_name,country_iso_code,region,cloud_provider,cloud_region,os,python_version,codecarbon_version,cpu_count,cpu_model,gpu_count,gpu_model,longitude,latitude,ram_total_size,tracking_mode,on_cloud,pue
+ 2024-08-22T16:46:25,codecarbon,2dcea261-fee7-4c8f-bbad-12023a91ded6,3888.2223563194275,0.05984410271752748,1.539112150318883e-05,42.5,36.981330401532624,5.7858710289001465,0.0458992787367768,0.232181822411976,0.00624643351635387,0.28432753466510685,United States,USA,new york,,,Linux-5.10.102.1-microsoft-standard-WSL2-x86_64-with-glibc2.39,3.12.3,2.3.5,16,AMD Ryzen 7 7800X3D 8-Core Processor,1,1 x NVIDIA GeForce RTX 4090,-74.1184,40.5697,15.42898941040039,machine,N,1.0
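
The added emissions.csv follows codecarbon's tracker output schema (codecarbon_version 2.3.5 in the row above). A minimal sketch of how such a file is typically produced, assuming codecarbon's EmissionsTracker wraps the training loop; the project name, output directory, and train() call are assumptions, not code from this repo:

# Sketch: wrapping a training run with codecarbon to emit emissions.csv.
# train() is a placeholder for the actual training loop.
from codecarbon import EmissionsTracker

tracker = EmissionsTracker(project_name="codecarbon", output_dir=".")
tracker.start()
try:
    train()
finally:
    emissions_kg = tracker.stop()   # appends a row to emissions.csv
    print(f"Estimated emissions: {emissions_kg:.4f} kg CO2eq")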
runs/Aug22_15-41-14_B650E/events.out.tfevents.1724355697.B650E.15666.0 CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
- oid sha256:27a3f150e489e447b25232ea144e56079d396cf78b80fc744f50febb35d8d5e1
- size 31697
+ oid sha256:e4cd82c8169e3cb16be60c4d3bf8225720e4cbce37e4057ce1427c1eda7c375a
+ size 48668
runs/Aug22_19-04-22_B650E/events.out.tfevents.1724367868.B650E.2966.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c39933337ed15a1113c3d6df046134c699a14ea3f7db642497f75c3daff98e8e
+ size 13009
training_args.bin CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
- oid sha256:8f4bb3cb297f45f59f56c85a96df07b7363b4c207d1cc7dc9113b343e9a7e811
+ oid sha256:b53775e71c1b92d2da249d86a1ff524cac4628b9d54aed0cd4a473ef56125f47
   size 5240
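
The "Training in progress, step 20" commit message, the updated training_args.bin, and the new runs/ event file are consistent with a transformers Trainer run that logs to TensorBoard and pushes checkpoints to the Hub during training. A minimal sketch with assumed argument values; nothing below is decoded from training_args.bin:

# Sketch (assumed values): TrainingArguments that would write TensorBoard
# event files under runs/ and push "Training in progress" commits to the Hub.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="outputs",        # illustrative
    report_to="tensorboard",     # produces runs/<date>_<host>/events.out.tfevents.*
    save_steps=20,               # matches "step 20" in the commit message
    push_to_hub=True,            # uploads checkpoints while training
)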