{
"config": {
"name": "pytorch_generate",
"backend": {
"name": "pytorch",
"version": "2.3.0+cu121",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "text-generation",
"library": "transformers",
"model": "google/gemma-2b",
"processor": "google/gemma-2b",
"device": "cuda",
"device_ids": "0",
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"model_kwargs": {},
"processor_kwargs": {},
"hub_kwargs": {},
"no_weights": true,
"device_map": null,
"torch_dtype": "float16",
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": "static",
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {
"backend": "inductor",
"mode": "reduce-overhead",
"fullgraph": true
},
"quantization_scheme": null,
"quantization_config": {},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 2,
"duration": 0,
"warmup_runs": 10,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 7
},
"new_tokens": null,
"latency": true,
"memory": true,
"energy": false,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 128,
"min_new_tokens": 128,
"do_sample": false
},
"call_kwargs": {}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": true,
"device_isolation_action": "warn",
"start_method": "spawn"
},
"environment": {
"cpu": " AMD EPYC 7R32",
"cpu_count": 16,
"cpu_ram_mb": 66697.29792,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.10.216-204.855.amzn2.x86_64-x86_64-with-glibc2.29",
"processor": "x86_64",
"python_version": "3.8.10",
"gpu": [
"NVIDIA A10G"
],
"gpu_count": 1,
"gpu_vram_mb": 24146608128,
"optimum_benchmark_version": "0.2.1",
"optimum_benchmark_commit": null,
"transformers_version": "4.42.0.dev0",
"transformers_commit": "8d2efea701bb524efb9917bf10657f4fd8021e9a",
"accelerate_version": "0.31.0.dev0",
"accelerate_commit": null,
"diffusers_version": null,
"diffusers_commit": null,
"optimum_version": "1.21.0.dev0",
"optimum_commit": null,
"timm_version": "0.9.16",
"timm_commit": null,
"peft_version": "0.11.2.dev0",
"peft_commit": null
}
},
"report": {
"prefill": {
"memory": {
"unit": "MB",
"max_ram": 1754.353664,
"max_global_vram": 6793.199616,
"max_process_vram": 0.0,
"max_reserved": 6146.752512,
"max_allocated": 5035.912192
},
"latency": {
"unit": "s",
"count": 2,
"total": 0.04423503875732422,
"mean": 0.02211751937866211,
"stdev": 0.0006142559051513664,
"p50": 0.02211751937866211,
"p90": 0.0226089241027832,
"p95": 0.022670349693298338,
"p99": 0.022719490165710447,
"values": [
0.022731775283813475,
0.021503263473510743
]
},
"throughput": {
"unit": "tokens/s",
"value": 316.4911887339976
},
"energy": null,
"efficiency": null
},
"decode": {
"memory": {
"unit": "MB",
"max_ram": 1754.374144,
"max_global_vram": 6793.199616,
"max_process_vram": 0.0,
"max_reserved": 6146.752512,
"max_allocated": 5035.912704
},
"latency": {
"unit": "s",
"count": 2,
"total": 5.080131591796874,
"mean": 2.540065795898437,
"stdev": 0.014590698242187505,
"p50": 2.540065795898437,
"p90": 2.5517383544921874,
"p95": 2.553197424316406,
"p99": 2.554364680175781,
"values": [
2.554656494140625,
2.52547509765625
]
},
"throughput": {
"unit": "tokens/s",
"value": 49.9987048386986
},
"energy": null,
"efficiency": null
},
"per_token": {
"memory": null,
"latency": {
"unit": "s",
"count": 253,
"total": 5.080565755844119,
"mean": 0.02008128757250639,
"stdev": 0.0014307271893491428,
"p50": 0.01983590316772461,
"p90": 0.02024304656982422,
"p95": 0.020713472366333006,
"p99": 0.022801776580810545,
"values": [
0.02182655906677246,
0.022700031280517577,
0.022912000656127928,
0.021621759414672852,
0.021514240264892577,
0.02024448013305664,
0.020204544067382812,
0.01987379264831543,
0.01982259178161621,
0.019803136825561524,
0.02006630325317383,
0.019801088333129883,
0.019818496704101563,
0.0198154239654541,
0.019931135177612306,
0.020262912750244142,
0.020237312316894532,
0.02022400093078613,
0.020180992126464844,
0.020185087203979494,
0.020163583755493163,
0.020199424743652345,
0.020166656494140626,
0.020132863998413086,
0.02019430351257324,
0.02021887969970703,
0.02021990394592285,
0.020175872802734376,
0.020179967880249023,
0.020118528366088868,
0.020569087982177735,
0.02032640075683594,
0.020281343460083007,
0.02019430351257324,
0.020197376251220703,
0.020179967880249023,
0.02017791938781738,
0.02020249557495117,
0.020222976684570314,
0.020184064865112306,
0.020180992126464844,
0.02020966339111328,
0.020198400497436524,
0.020176895141601564,
0.020173824310302735,
0.020123647689819335,
0.02128281593322754,
0.023988224029541014,
0.02083430480957031,
0.020541439056396483,
0.020386816024780274,
0.02104115104675293,
0.020734975814819336,
0.019946495056152345,
0.019826688766479493,
0.019922943115234376,
0.019998720169067383,
0.019788799285888673,
0.019808256149291992,
0.01979801559448242,
0.019810304641723633,
0.01984102439880371,
0.01981644821166992,
0.01979903984069824,
0.019765247344970704,
0.01979801559448242,
0.01983078384399414,
0.01981439971923828,
0.01980620765686035,
0.01984511947631836,
0.019788799285888673,
0.01987276840209961,
0.02044211196899414,
0.02039910316467285,
0.01983283233642578,
0.020150272369384766,
0.019895296096801757,
0.019870719909667968,
0.019853311538696287,
0.01983180809020996,
0.01982156753540039,
0.0198287353515625,
0.019785728454589844,
0.019810304641723633,
0.019781631469726564,
0.01978265571594238,
0.019745792388916016,
0.01983590316772461,
0.019746816635131836,
0.019808256149291992,
0.019811328887939454,
0.01981644821166992,
0.019758079528808595,
0.019762176513671875,
0.019760128021240234,
0.019765247344970704,
0.019767295837402343,
0.019793920516967774,
0.019783679962158202,
0.01979084777832031,
0.01979801559448242,
0.01985228729248047,
0.01982771110534668,
0.0198287353515625,
0.019766271591186522,
0.019780607223510743,
0.019920896530151368,
0.019810304641723633,
0.019791872024536132,
0.019780607223510743,
0.019770368576049805,
0.01988812828063965,
0.019777536392211914,
0.01983078384399414,
0.019812351226806642,
0.019794944763183595,
0.019811328887939454,
0.019801088333129883,
0.01982464027404785,
0.01983180809020996,
0.019834880828857423,
0.01984614372253418,
0.01983795166015625,
0.01982259178161621,
0.01985638427734375,
0.01992192077636719,
0.04149555206298828,
0.01983283233642578,
0.019939327239990236,
0.019801088333129883,
0.019778560638427735,
0.01984614372253418,
0.019817472457885742,
0.01982259178161621,
0.019808256149291992,
0.01982464027404785,
0.019771392822265626,
0.0202608642578125,
0.019914751052856446,
0.0198287353515625,
0.019826688766479493,
0.0198656005859375,
0.01983692741394043,
0.01980723190307617,
0.019786752700805665,
0.019801088333129883,
0.019769344329833984,
0.019786752700805665,
0.01982464027404785,
0.019811328887939454,
0.019787776947021486,
0.019931135177612306,
0.01988812828063965,
0.01984511947631836,
0.019808256149291992,
0.01980620765686035,
0.019826688766479493,
0.01985536003112793,
0.01980620765686035,
0.019796991348266603,
0.01983897590637207,
0.0198604793548584,
0.019834880828857423,
0.019809280395507813,
0.01979903984069824,
0.01983795166015625,
0.01982054328918457,
0.019812351226806642,
0.01982771110534668,
0.019819520950317384,
0.01979801559448242,
0.01979903984069824,
0.02004787254333496,
0.019923967361450197,
0.01987379264831543,
0.01983795166015625,
0.019802112579345704,
0.01983283233642578,
0.019780607223510743,
0.01983897590637207,
0.019804159164428712,
0.01984614372253418,
0.019811328887939454,
0.01982156753540039,
0.01982156753540039,
0.0198656005859375,
0.01987174415588379,
0.01985740852355957,
0.019761152267456054,
0.019792896270751953,
0.019876863479614256,
0.019813375473022463,
0.019773439407348634,
0.019795967102050782,
0.019773439407348634,
0.019802112579345704,
0.01985024070739746,
0.020447231292724608,
0.019977216720581056,
0.019796991348266603,
0.01978265571594238,
0.019985408782958985,
0.019955711364746095,
0.019899391174316407,
0.019908607482910155,
0.019917823791503905,
0.01985228729248047,
0.01983078384399414,
0.01982156753540039,
0.01991372871398926,
0.01984511947631836,
0.01986355209350586,
0.019854335784912108,
0.019809280395507813,
0.01979084777832031,
0.01984819221496582,
0.01985638427734375,
0.019805183410644533,
0.019765247344970704,
0.01982156753540039,
0.019791872024536132,
0.01979903984069824,
0.01980620765686035,
0.019792896270751953,
0.019755008697509766,
0.02069196891784668,
0.020699136734008788,
0.02007961654663086,
0.01985228729248047,
0.01984511947631836,
0.019854335784912108,
0.019819520950317384,
0.019771392822265626,
0.019800064086914062,
0.01987379264831543,
0.019870719909667968,
0.01986764717102051,
0.019903488159179687,
0.01983897590637207,
0.01985536003112793,
0.019834880828857423,
0.019893247604370116,
0.01978982353210449,
0.019791872024536132,
0.019784704208374023,
0.020237312316894532,
0.021776384353637695,
0.020741119384765624,
0.019945472717285157,
0.01987379264831543,
0.01984511947631836,
0.019795967102050782,
0.01983795166015625
]
},
"throughput": {
"unit": "tokens/s",
"value": 49.797603683994645
},
"energy": null,
"efficiency": null
}
}
}