{
    "inference": {
        "model": [
            {
                "id": "gpt2",
                "instanceType": "ml.g5.12xlarge",
                "numGpu": 4,
                "containerStartupHealthCheckTimeout": 900,
                "isQuantized": "gptq"
            },
            {
                "id": "test/gpt2",
                "instanceType": "ml.g5.12xlarge",
                "numGpu": 4,
                "isQuantized": "gptq"
            }
        ]
    },
    "training": {}
}
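
Below is a minimal sketch of how a deployment script might consume this file, assuming it is saved as config.json. The file name, the helper function, and the 600-second fallback timeout are illustrative assumptions, not taken from the source.

import json
from typing import Any


def load_inference_models(path: str = "config.json") -> list[dict[str, Any]]:
    """Read the config and return the list of inference model entries."""
    with open(path) as f:
        cfg = json.load(f)
    # "inference.model" holds one entry per deployable model variant.
    return cfg.get("inference", {}).get("model", [])


if __name__ == "__main__":
    for model in load_inference_models():
        # "containerStartupHealthCheckTimeout" is present only on the first
        # entry in the source config; fall back to a hypothetical default
        # of 600 seconds when it is absent.
        timeout = model.get("containerStartupHealthCheckTimeout", 600)
        print(
            f"model={model['id']} instance={model['instanceType']} "
            f"gpus={model['numGpu']} quantization={model['isQuantized']} "
            f"startup_timeout={timeout}s"
        )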