ContextualIntentSlotModel.Config

Component: ContextualIntentSlotModel

class ContextualIntentSlotModel.Config

Bases: JointModel.Config

All Attributes (including base classes)

representation: ContextualIntentSlotRepresentation.Config = ContextualIntentSlotRepresentation.Config()
output_layer: IntentSlotOutputLayer.Config = IntentSlotOutputLayer.Config()
decoder: IntentSlotModelDecoder.Config = IntentSlotModelDecoder.Config()
default_doc_loss_weight: float = 0.2
default_word_loss_weight: float = 0.5

Default JSON

{
    "representation": {
        "load_path": null,
        "save_path": null,
        "freeze": false,
        "shared_module_key": null,
        "seq_representation": {
            "load_path": null,
            "save_path": null,
            "freeze": false,
            "shared_module_key": null,
            "doc_representation": {
                "load_path": null,
                "save_path": null,
                "freeze": false,
                "shared_module_key": null,
                "dropout": 0.4,
                "cnn": {
                    "kernel_num": 100,
                    "kernel_sizes": [
                        3,
                        4
                    ]
                }
            },
            "seq_representation": {
                "BiLSTMDocAttention": {
                    "load_path": null,
                    "save_path": null,
                    "freeze": false,
                    "shared_module_key": null,
                    "dropout": 0.4,
                    "lstm": {
                        "load_path": null,
                        "save_path": null,
                        "freeze": false,
                        "shared_module_key": null,
                        "dropout": 0.4,
                        "lstm_dim": 32,
                        "num_layers": 1,
                        "bidirectional": true
                    },
                    "pooling": {
                        "SelfAttention": {
                            "attn_dimension": 64,
                            "dropout": 0.4
                        }
                    },
                    "mlp_decoder": null
                }
            }
        },
        "joint_representation": {
            "BiLSTMDocSlotAttention": {
                "load_path": null,
                "save_path": null,
                "freeze": false,
                "shared_module_key": null,
                "dropout": 0.4,
                "lstm": {
                    "load_path": null,
                    "save_path": null,
                    "freeze": false,
                    "shared_module_key": null,
                    "dropout": 0.4,
                    "lstm_dim": 32,
                    "num_layers": 1,
                    "bidirectional": true
                },
                "pooling": null,
                "slot_attention": null,
                "doc_mlp_layers": 0,
                "word_mlp_layers": 0
            }
        }
    },
    "output_layer": {
        "load_path": null,
        "save_path": null,
        "freeze": false,
        "shared_module_key": null,
        "doc_output": {
            "load_path": null,
            "save_path": null,
            "freeze": false,
            "shared_module_key": null,
            "loss": {
                "CrossEntropyLoss": {}
            }
        },
        "word_output": {
            "WordTaggingOutputLayer": {
                "load_path": null,
                "save_path": null,
                "freeze": false,
                "shared_module_key": null,
                "loss": {}
            }
        }
    },
    "decoder": {
        "load_path": null,
        "save_path": null,
        "freeze": false,
        "shared_module_key": null,
        "use_doc_probs_in_word": false
    },
    "default_doc_loss_weight": 0.2,
    "default_word_loss_weight": 0.5
}
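
Example Override (sketch)

The two loss-weight fields control the relative contribution of the document (intent) and word (slot) losses when the joint loss is computed. To change any of these defaults, specify only the keys you want to override in the model section of your task config; unspecified fields fall back to the Default JSON above. The snippet below is a minimal sketch: the enclosing task/model nesting is omitted and depends on the task being configured, and the chosen values are illustrative only.

{
    "default_doc_loss_weight": 0.5,
    "default_word_loss_weight": 0.5,
    "decoder": {
        "use_doc_probs_in_word": true
    },
    "representation": {
        "seq_representation": {
            "doc_representation": {
                "dropout": 0.2
            }
        }
    }
}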