{
"config": {
"name": "pytorch_generate",
"backend": {
"name": "pytorch",
"version": "2.4.0+cu121",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "text-generation",
"library": "transformers",
"model_type": "gemma",
"model": "google/gemma-2b",
"processor": "google/gemma-2b",
"device": "cuda",
"device_ids": "0",
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"model_kwargs": {},
"processor_kwargs": {},
"no_weights": true,
"device_map": null,
"torch_dtype": "float16",
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": "static",
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {
"backend": "inductor",
"mode": "reduce-overhead",
"fullgraph": true
},
"quantization_scheme": null,
"quantization_config": {},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 2,
"duration": 0,
"warmup_runs": 10,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 7
},
"new_tokens": null,
"memory": true,
"latency": true,
"energy": false,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 128,
"min_new_tokens": 128,
"do_sample": false
},
"call_kwargs": {}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": true,
"device_isolation_action": "warn",
"numactl": false,
"numactl_kwargs": {},
"start_method": "spawn"
},
"environment": {
"cpu": " AMD EPYC 7R32",
"cpu_count": 16,
"cpu_ram_mb": 66697.261056,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.10.223-211.872.amzn2.x86_64-x86_64-with-glibc2.29",
"processor": "x86_64",
"python_version": "3.8.10",
"gpu": [
"NVIDIA A10G"
],
"gpu_count": 1,
"gpu_vram_mb": 24146608128,
"optimum_benchmark_version": "0.4.0",
"optimum_benchmark_commit": null,
"transformers_version": "4.45.0.dev0",
"transformers_commit": "d806fa3e92289876e01ab19c9e19e9264ea1c1a1",
"accelerate_version": "0.34.0.dev0",
"accelerate_commit": null,
"diffusers_version": null,
"diffusers_commit": null,
"optimum_version": "1.22.0.dev0",
"optimum_commit": null,
"timm_version": "0.9.16",
"timm_commit": null,
"peft_version": "0.12.1.dev0",
"peft_commit": null
}
},
"report": {
"load": {
"memory": {
"unit": "MB",
"max_ram": 1319.010304,
"max_global_vram": 6768.033792,
"max_process_vram": 0.0,
"max_reserved": 6138.363904,
"max_allocated": 6060.931072
},
"latency": {
"unit": "s",
"count": 1,
"total": 11.4680869140625,
"mean": 11.4680869140625,
"stdev": 0.0,
"p50": 11.4680869140625,
"p90": 11.4680869140625,
"p95": 11.4680869140625,
"p99": 11.4680869140625,
"values": [
11.4680869140625
]
},
"throughput": null,
"energy": null,
"efficiency": null
},
"prefill": {
"memory": {
"unit": "MB",
"max_ram": 1756.086272,
"max_global_vram": 6789.005312,
"max_process_vram": 0.0,
"max_reserved": 6142.558208,
"max_allocated": 5034.594816
},
"latency": {
"unit": "s",
"count": 2,
"total": 0.043132991790771485,
"mean": 0.021566495895385743,
"stdev": 4.188728332519577e-05,
"p50": 0.021566495895385743,
"p90": 0.021600005722045897,
"p95": 0.02160419445037842,
"p99": 0.021607545433044432,
"values": [
0.021608383178710937,
0.021524608612060545
]
},
"throughput": {
"unit": "tokens/s",
"value": 324.5775314615521
},
"energy": null,
"efficiency": null
},
"decode": {
"memory": {
"unit": "MB",
"max_ram": 1791.246336,
"max_global_vram": 6793.199616,
"max_process_vram": 0.0,
"max_reserved": 6146.752512,
"max_allocated": 5036.937728
},
"latency": {
"unit": "s",
"count": 2,
"total": 5.2087939453125,
"mean": 2.60439697265625,
"stdev": 0.00757177734375003,
"p50": 2.60439697265625,
"p90": 2.61045439453125,
"p95": 2.611211572265625,
"p99": 2.611817314453125,
"values": [
2.5968251953125,
2.61196875
]
},
"throughput": {
"unit": "tokens/s",
"value": 48.763687461390134
},
"energy": null,
"efficiency": null
},
"per_token": {
"memory": null,
"latency": {
"unit": "s",
"count": 254,
"total": 5.208528909683229,
"mean": 0.020506019329461528,
"stdev": 0.00044100919564279165,
"p50": 0.02068940830230713,
"p90": 0.02089687099456787,
"p95": 0.02101437397003174,
"p99": 0.02142007369995117,
"values": [
0.019934207916259765,
0.019896320343017578,
0.02089472007751465,
0.021132287979125978,
0.020767744064331056,
0.020898815155029296,
0.020690944671630858,
0.019933183670043944,
0.019896320343017578,
0.020728832244873048,
0.02067865562438965,
0.02064896011352539,
0.020677631378173827,
0.02072985649108887,
0.020760576248168947,
0.020786176681518553,
0.020770816802978515,
0.020539392471313478,
0.020760576248168947,
0.020690944671630858,
0.020800512313842775,
0.021098495483398438,
0.020801536560058592,
0.020840448379516603,
0.020686847686767578,
0.02072985649108887,
0.020716543197631835,
0.01982156753540039,
0.01984716796875,
0.019885055541992186,
0.02001408004760742,
0.020945920944213867,
0.019908607482910155,
0.01985945510864258,
0.019834880828857423,
0.01984102439880371,
0.020446207046508787,
0.019854335784912108,
0.019878911972045898,
0.019985408782958985,
0.020376575469970702,
0.01985843276977539,
0.019941375732421874,
0.01984819221496582,
0.019922943115234376,
0.02083635139465332,
0.02082918357849121,
0.020708351135253905,
0.020980735778808594,
0.020792320251464845,
0.020751359939575196,
0.020777984619140624,
0.020782079696655274,
0.020794368743896483,
0.021404672622680664,
0.02085785675048828,
0.02082713508605957,
0.020751359939575196,
0.020695039749145508,
0.020674560546875,
0.02063871955871582,
0.020746240615844725,
0.02065715217590332,
0.02067046356201172,
0.020703231811523438,
0.020733951568603515,
0.020668415069580077,
0.020727807998657227,
0.020742143630981445,
0.020658176422119142,
0.02088140869140625,
0.019903488159179687,
0.01986764717102051,
0.019932159423828123,
0.01985740852355957,
0.019891199111938478,
0.019802112579345704,
0.01993011283874512,
0.01984716796875,
0.01988096046447754,
0.01989017677307129,
0.019919872283935547,
0.019854335784912108,
0.0198492488861084,
0.019864543914794922,
0.01985740852355957,
0.019853311538696287,
0.01987276840209961,
0.01984921646118164,
0.01991372871398926,
0.01986355209350586,
0.01982771110534668,
0.019843072891235353,
0.0198604793548584,
0.02045644760131836,
0.020784128189086915,
0.02070528030395508,
0.0208353271484375,
0.020754432678222655,
0.020710399627685547,
0.020760576248168947,
0.020718591690063477,
0.020406272888183592,
0.020575231552124023,
0.020792320251464845,
0.021131263732910157,
0.02084864044189453,
0.020100095748901366,
0.019900415420532228,
0.020297727584838866,
0.020746240615844725,
0.020642816543579103,
0.020674560546875,
0.020707328796386718,
0.020731903076171874,
0.02064896011352539,
0.02068172836303711,
0.02068889617919922,
0.020940799713134766,
0.02088652801513672,
0.020642816543579103,
0.02071244812011719,
0.02063974380493164,
0.02062233543395996,
0.02065203285217285,
0.020754432678222655,
0.02066022491455078,
0.019893247604370116,
0.01988096046447754,
0.019938304901123048,
0.020754432678222655,
0.02059775924682617,
0.020746240615844725,
0.02058956718444824,
0.02063564872741699,
0.0206561279296875,
0.020632575988769532,
0.02064691162109375,
0.02069196891784668,
0.020708351135253905,
0.02064588737487793,
0.020644863128662108,
0.02127872085571289,
0.020682752609252928,
0.020766719818115235,
0.020958208084106447,
0.020706304550170897,
0.0206561279296875,
0.020709375381469726,
0.020716543197631835,
0.020719615936279297,
0.02068377685546875,
0.02065920066833496,
0.020760576248168947,
0.02149273681640625,
0.020939775466918945,
0.02066739273071289,
0.02064793586730957,
0.020743167877197266,
0.02082713508605957,
0.02066431999206543,
0.02062950325012207,
0.019854335784912108,
0.019853311538696287,
0.01986867141723633,
0.01983078384399414,
0.01981439971923828,
0.01966080093383789,
0.019739648818969727,
0.01988096046447754,
0.01984000015258789,
0.01985536003112793,
0.01983897590637207,
0.019842048645019532,
0.01985843276977539,
0.01991372871398926,
0.019851264953613282,
0.01985945510864258,
0.01988812828063965,
0.01984716796875,
0.01984000015258789,
0.01980620765686035,
0.019818496704101563,
0.01982054328918457,
0.019904512405395508,
0.01984614372253418,
0.01987481689453125,
0.019825664520263672,
0.01987276840209961,
0.019876863479614256,
0.020274175643920898,
0.02022604751586914,
0.020917247772216797,
0.020723712921142577,
0.02083737564086914,
0.020793344497680662,
0.02068992042541504,
0.020762624740600585,
0.020732927322387695,
0.02072985649108887,
0.020739072799682616,
0.020786176681518553,
0.020718591690063477,
0.02087936019897461,
0.020940799713134766,
0.021296127319335938,
0.020984832763671874,
0.020784128189086915,
0.020797439575195312,
0.02067967987060547,
0.0208035831451416,
0.020737024307250978,
0.020766719818115235,
0.020753408432006838,
0.02063871955871582,
0.02087014389038086,
0.02082815933227539,
0.020743167877197266,
0.020924415588378906,
0.020813823699951172,
0.020757503509521484,
0.020781055450439453,
0.02068377685546875,
0.020715520858764647,
0.021437440872192383,
0.021560319900512694,
0.021112831115722656,
0.021021696090698243,
0.02087936019897461,
0.020777984619140624,
0.02101043128967285,
0.020800512313842775,
0.020750335693359375,
0.02068377685546875,
0.02067967987060547,
0.020702207565307617,
0.02068172836303711,
0.020818944931030273,
0.02084659194946289,
0.02089779281616211,
0.021350400924682617,
0.020748287200927733,
0.021202943801879884,
0.02079641532897949,
0.02084147262573242,
0.02085990333557129,
0.020692991256713866,
0.020732927322387695,
0.02066739273071289,
0.020824064254760744,
0.020755456924438476,
0.02077187156677246,
0.02091209602355957,
0.020749311447143554
]
},
"throughput": {
"unit": "tokens/s",
"value": 48.76616879821595
},
"energy": null,
"efficiency": null
}
}
}