LightChen2333 committed
Commit aded56d • 1 Parent(s): 971e88a
Upload 3 files

config.json: +23 -14

config.json CHANGED
@@ -284,7 +284,7 @@
     "intent_classifier": {
       "_model_target_": "model.decoder.classifier.MLPClassifier",
       "dropout_rate": 0.4,
-      "ignore_index":
+      "ignore_index": -100,
       "input_dim": 384,
       "intent_label_num": 17,
       "loss_fn": {
@@ -293,7 +293,7 @@
       "mlp": [
         {
           "_model_target_": "torch.nn.Linear",
-          "in_features":
+          "in_features": 384,
           "out_features": 256
         },
         {
@@ -303,39 +303,40 @@
         {
           "_model_target_": "torch.nn.Linear",
           "in_features": 256,
-          "out_features":
+          "out_features": 17
         }
       ],
       "mode": "token-level-intent",
       "multi_threshold": 0.5,
       "return_sentence_level": true,
       "use_intent": true,
-      "use_multi":
+      "use_multi": true
     },
     "interaction": {
       "_model_target_": "model.decoder.interaction.GLGINInteraction",
       "alpha": 0.2,
       "dropout_rate": 0.4,
       "hidden_dim": 256,
-      "input_dim":
+      "input_dim": 384,
       "intent_embedding_dim": 64,
-      "intent_label_num":
+      "intent_label_num": 17,
       "num_heads": 8,
       "num_layers": 2,
-      "output_dim":
+      "output_dim": 64,
       "row_normalized": true,
-      "slot_graph_window": 1
+      "slot_graph_window": 1,
+      "slot_label_num": 111
     },
     "slot_classifier": {
       "_model_target_": "model.decoder.classifier.MLPClassifier",
       "dropout_rate": 0.4,
-      "ignore_index":
+      "ignore_index": -100,
       "input_dim": 384,
       "mlp": [
         {
           "_model_target_": "torch.nn.Linear",
-          "in_features":
-          "out_features":
+          "in_features": 64,
+          "out_features": 64
         },
         {
           "_model_target_": "torch.nn.LeakyReLU",
@@ -343,8 +344,8 @@
         },
         {
           "_model_target_": "torch.nn.Linear",
-          "in_features":
-          "out_features":
+          "in_features": 64,
+          "out_features": 111
         }
       ],
       "mode": "slot",
@@ -364,7 +365,8 @@
     },
     "embedding": {
       "dropout_rate": 0.4,
-      "embedding_dim": 128
+      "embedding_dim": 128,
+      "vocab_size": 790
     },
     "encoder_name": "self-attention-lstm",
     "lstm": {
@@ -379,6 +381,13 @@
     }
   },
   "return_dict": false,
+  "tokenizer": {
+    "_align_mode_": "fast",
+    "_padding_side_": "right",
+    "_tokenizer_name_": "word_tokenizer",
+    "add_special_tokens": false,
+    "max_length": 512
+  },
   "tokenizer_class": "OpenSLUv1",
   "torch_dtype": "float32",
   "transformers_version": "4.25.1",
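
For reference, the filled-in dimensions are mutually consistent. The minimal PyTorch sketch below (an illustration only, not OpenSLU's actual config loader) assembles the embedding and the two classifier heads from the new values; the intent head's middle mlp entry sits outside the visible hunk, so the LeakyReLU there is an assumption mirroring the slot head.

import torch
import torch.nn as nn

# "vocab_size": 790, "embedding_dim": 128 from the new embedding block
embedding = nn.Embedding(num_embeddings=790, embedding_dim=128)

# intent_classifier.mlp: 384 -> 256 -> 17 ("input_dim": 384, "intent_label_num": 17)
intent_head = nn.Sequential(
    nn.Linear(in_features=384, out_features=256),
    nn.LeakyReLU(),  # assumed activation; this entry is outside the diff hunk
    nn.Linear(in_features=256, out_features=17),
)

# slot_classifier.mlp: 64 -> 64 -> 111 ("slot_label_num": 111)
slot_head = nn.Sequential(
    nn.Linear(in_features=64, out_features=64),
    nn.LeakyReLU(),
    nn.Linear(in_features=64, out_features=111),
)

# Shape check on a dummy batch (seq_len well under the new "max_length": 512)
tokens = torch.randint(0, 790, (2, 16))
print(embedding(tokens).shape)                      # torch.Size([2, 16, 128])
print(intent_head(torch.randn(2, 16, 384)).shape)   # torch.Size([2, 16, 17])
print(slot_head(torch.randn(2, 16, 64)).shape)      # torch.Size([2, 16, 111])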