{"tstamp": 1720583926.6994, "type": "chat", "model": "llava-fire", "gen_params": {"temperature": 0.7, "top_p": 1.0, "max_new_tokens": 1024}, "start": 1720583926.678, "finish": 1720583926.6994, "state": {"template_name": "vicuna_v1.1", "system_message": "A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.", "roles": ["USER", "ASSISTANT"], "messages": [["USER", "Hello"], ["ASSISTANT", "hello"]], "offset": 0, "conv_id": "a48164214dce453a9ad276401b7d095c", "model_name": "llava-fire", "has_csam_image": false}, "ip": "127.0.0.1"}
{"tstamp": 1720583926.6998, "type": "chat", "model": "llava-original", "gen_params": {"temperature": 0.7, "top_p": 1.0, "max_new_tokens": 1024}, "start": 1720583926.6815, "finish": 1720583926.6998, "state": {"template_name": "vicuna_v1.1", "system_message": "A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.", "roles": ["USER", "ASSISTANT"], "messages": [["USER", "Hello"], ["ASSISTANT", "hello"]], "offset": 0, "conv_id": "8bed262a5728409284a7a56a0fe66a75", "model_name": "llava-original", "has_csam_image": false}, "ip": "127.0.0.1"}
{"tstamp": 1720588858.8939, "type": "chat", "model": "llava-fire", "gen_params": {"temperature": 0.7, "top_p": 1.0, "max_new_tokens": 1024}, "start": 1720588858.8843, "finish": 1720588858.8939, "state": {"template_name": "vicuna_v1.1", "system_message": "A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.", "roles": ["USER", "ASSISTANT"], "messages": [["USER", "Hello"], ["ASSISTANT", "hello"]], "offset": 0, "conv_id": "d994e3d859c94bddbf0dfcaed6c63079", "model_name": "llava-fire", "has_csam_image": false}, "ip": "46.3.240.105"}
{"tstamp": 1720588858.8951, "type": "chat", "model": "llava-original", "gen_params": {"temperature": 0.7, "top_p": 1.0, "max_new_tokens": 1024}, "start": 1720588858.8863, "finish": 1720588858.8951, "state": {"template_name": "vicuna_v1.1", "system_message": "A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.", "roles": ["USER", "ASSISTANT"], "messages": [["USER", "Hello"], ["ASSISTANT", "hello"]], "offset": 0, "conv_id": "e41b47f05f8b44ff9d520c8e94c6e8de", "model_name": "llava-original", "has_csam_image": false}, "ip": "46.3.240.105"}
{"tstamp": 1720589062.1758, "type": "chat", "model": "llava-fire", "gen_params": {"temperature": 0.7, "top_p": 1.0, "max_new_tokens": 1024}, "start": 1720589047.9171, "finish": 1720589062.1758, "state": {"template_name": "vicuna_v1.1", "system_message": "A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.", "roles": ["USER", "ASSISTANT"], "messages": [["USER", "Hello"], ["ASSISTANT", "hello"]], "offset": 0, "conv_id": "963f15cd5e224eb8ae02c67ed37b93c4", "model_name": "llava-fire", "has_csam_image": false}, "ip": "46.3.240.105"}