id (string, lengths 1–4) | tokens (sequence) | ner_tags (sequence)
2600
[ "EmbedHalluc", "outperforms", "the", "baseline", "across", "14", "of", "the", "15", "tasks", "with", "an", "average", "improvement", "of", "2.43", "over", "the", "baseline." ]
[ 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 0, 0, 0 ]
2601
[ "Table", "B.1", "shows", "the", "results", "of", "the", "experiments." ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
2602
[ "In", "addition", "to", "the", "experiments", "using", "RoBERTa", "shown", "in", "the", "main", "paper,", "here", "we", "show", "the", "results", "of", "BERT-large-cased", "with", "conventional", "fine-tuning", "as", "a", "further", "check", "on", "robustness", "of", "our", "method", "with", "respect", "to", "the", "choice", "of", "model." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2603
[ "Best", "learning", "rate", "for", "RoBERTa-large", "prompt-based", "fine-tuning." ]
[ 0, 3, 4, 0, 0, 0, 0 ]
2604
[ "Here,", "we", "provide", "best", "learning", "rates", "(LR,", "searched", "6", "as", "discussed", "in", "main", "paper)", "from", "1e−", "for", "Lhalluc", "of", "EmbedHalluc", "for", "each", "task", "used", "in", "RoBERTa-large", "prompt-based", "fine-tuning." ]
[ 0, 0, 0, 0, 3, 4, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 ]
2605
[ "References" ]
[ 0 ]
2606
[ "As", "such,", "users", "of", "such", "models,", "specially", "for", "sensitive", "applications,", "should", "be", "aware", "of", "and", "if", "possible", "address", "such", "issues." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2607
[ "However,", "our", "work", "relies", "on", "pre-trained", "language", "models,", "which", "have", "been", "shown", "to", "be", "biased", "in", "prior", "work", "(Liang", "et", "al.,", "2021)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2608
[ "As", "far", "as", "we", "are", "aware,", "our", "proposed", "work", "does", "not", "have", "any", "explicit", "ethical", "concerns." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2609
[ "8", "Ethics", "Statement" ]
[ 0, 0, 0 ]
2610
[ "The", "proposed", "method", "improves", "over", "the", "baselines", "in", "15", "tasks", "and", "outperforms", "a", "common", "augmentation", "method,", "and", "two", "recent", "regularization", "methods." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2611
[ "In", "this", "paper,", "we", "introduce", "an", "embedding", "hallucination", "method", "for", "data", "augmentation", "for", "few-shot", "learning,", "based", "on", "cWGAN." ]
[ 0, 0, 0, 0, 0, 0, 1, 2, 0, 0, 0, 0, 0, 11, 12, 0, 0, 0 ]
2612
[ "7", "Conclusion" ]
[ 0, 0 ]
2613
[ "Besides,", "the", "learning", "of", "cWGAN", "requires", "careful", "human", "attention", "to", "maintain", "a", "stable", "training." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2614
[ "While", "EmbedHalluc", "works", "well", "empirically,", "it", "relies", "on", "hallucinating", "non-interpretable", "embeddings", "to", "facilitate", "the", "learning", "process." ]
[ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2615
[ "6", "Limitations" ]
[ 0, 0 ]
2616
[ "As", "shown", "in", "Table", "5,", "with", "one", "exception,", "our", "method", "largely", "outperforms", "freeLB", "and", "SMART." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1 ]
2617
[ "We", "use", "the", "default", "setting", "for", "SMART." ]
[ 0, 0, 0, 0, 0, 0, 1 ]
2618
[ "In", "addition", "to", "the", "default", "batch", "size", "and", "learning", "rate", "used", "in", "the", "baseline", "fine-tuning", "and", "EmbedHalluc,", "we", "also", "search", "additional", "batch", "sizes", "and", "learning", "rates", "for", "freeLB." ]
[ 0, 0, 0, 0, 0, 3, 4, 0, 3, 4, 0, 0, 0, 1, 2, 0, 1, 0, 0, 0, 0, 3, 4, 0, 3, 4, 0, 1 ]
2619
[ "suggested", "hyper-parameters", "for", "each", "task." ]
[ 0, 0, 0, 0, 0 ]
2620
[ "For", "freeLB,", "we", "use", "the", "publicly", "available", "code", "and" ]
[ 0, 1, 0, 0, 0, 0, 0, 0, 0 ]
2621
[ "Comparing", "to", "Adversarial", "Training" ]
[ 0, 0, 0, 0 ]
2622
[ "Adversarial", "training", "adds", "noise", "into", "the", "training", "data", "to", "increase", "the", "robustness", "of", "a", "model." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
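The row above describes adversarial training only in prose; below is a minimal PyTorch sketch of the general idea (an FGSM-style perturbation of input embeddings, not FreeLB or SMART themselves; the `inputs_embeds` keyword follows the Huggingface interface, and `loss_fn` is an illustrative stand-in):

```python
import torch

def adversarial_step(model, embeds, labels, loss_fn, eps=1e-3):
    """One illustrative adversarial-training step in the embedding space:
    perturb the input embeddings along the gradient sign and train on the
    perturbed batch as well (clean loss + adversarial loss)."""
    embeds = embeds.detach().requires_grad_(True)
    loss = loss_fn(model(inputs_embeds=embeds).logits, labels)
    grad, = torch.autograd.grad(loss, embeds)
    adv_embeds = embeds + eps * grad.sign()      # worst-case noise on the inputs
    adv_loss = loss_fn(model(inputs_embeds=adv_embeds).logits, labels)
    return loss + adv_loss
```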
2623
[ "It", "has", "been", "shown", "that", "adversarial", "training", "can", "also", "improve", "the", "performance", "of", "language", "models." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2624
[ "Here,", "we", "compare", "EmbedHalluc", "to", "two", "recent", "adversarial", "training", "methods,", "freeLB", "(Zhu", "et", "al.,", "2020)", "and", "SMART", "(Jiang", "et", "al.,", "2020)", "adapted", "to", "our", "setting." ]
[ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 ]
2625
[ "We", "find", "that", "those", "two", "methods", "fail", "to", "alleviate", "the", "over-fitting", "problem", "in", "such", "extreme", "setting,", "though", "they", "have", "been", "to", "be", "effective", "when", "given", "a", "few", "thousands", "examples." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2626
[ "Results", "for", "BERT-large-cased", "can", "be", "found", "in", "Appendix", "C." ]
[ 0, 0, 1, 0, 0, 0, 0, 0, 0 ]
2627
[ "For", "Re-Init,", "we", "search", "the", "top", "1,2,3,4,5", "layers;", "and", "for", "Mixout,", "we", "search", "mixout", "rate", "from", "0.1,", "0.2,", "...,", "0.9", "and", "report", "their", "best", "results", "in", "Table", "4,", "using", "RoBERTa-large." ]
[ 0, 1, 0, 0, 0, 0, 5, 3, 0, 0, 1, 0, 0, 3, 4, 0, 5, 5, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
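A minimal sketch of the Re-Init comparison above, assuming the Huggingface RoBERTa module layout; note `_init_weights` is a private Transformers helper and may change across versions:

```python
from transformers import RobertaForSequenceClassification

def reinit_top_layers(model, k):
    """Re-initialize the top-k encoder layers (Re-Init, Zhang et al., 2021),
    re-drawing their weights from the model's own init distribution."""
    for layer in model.roberta.encoder.layer[-k:]:
        layer.apply(model._init_weights)
    return model

model = RobertaForSequenceClassification.from_pretrained("roberta-large")
model = reinit_top_layers(model, k=3)  # k is searched over 1..5 above
```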
2628
[ "Since", "we", "adapt", "their", "code", "to", "our", "extreme", "data", "deficient", "setting,", "we", "re-search", "the", "hyper-parameters", "of", "both", "methods", "(including", "their", "suggested", "values)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2629
[ "We", "used", "the", "public", "code", "for", "both", "of", "these", "methods." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2630
[ "We", "further", "compare", "against", "Mixout", "(Lee", "et", "al.,", "2019),", "which", "is", "shown", "to", "be", "an", "effective", "regularization", "when", "fine-tuning", "with", "a", "few", "thousand", "examples." ]
[ 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2631
[ "Thus,", "we", "consider", "reinitialization", "(Re-Init)", "of", "top", "layers", "as", "one", "of", "our", "comparisons." ]
[ 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0 ]
2632
[ "Correcting", "bias", "in", "the", "optimizer", "is", "already", "fixed", "by", "the", "default", "optimizer", "in", "Huggingface", "Transformer", "and", "training", "longer", "surely", "will", "lead", "to", "further", "over-fitting", "in", "our", "extreme", "data", "scarce", "scenario." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2633
[ "Zhang", "et", "al.", "(2021)", "find", "that", "fine-tuning", "can", "be", "achieved", "by:", "correcting", "bias", "in", "the", "optimizer,", "re-initialization", "of", "top", "layers,", "and", "training", "longer." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2634
[ "Our", "method", "can", "also", "be", "viewed", "as", "an", "implicit", "regularization", "method." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2635
[ "The", "relatively", "smaller", "improvements", "for", "promptbased", "methods", "may", "be", "due", "to", "the", "inconsistency", "and" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2636
[ "4.6", "Negative", "Results", "from", "Regularizations" ]
[ 0, 0, 0, 0, 0 ]
2637
[ "EmbedHalluc", "is", "still", "competitive", "when", "comparing", "against", "SSL", "which", "assumes", "to", "have", "additional", "64", "examples", "per", "class", "from", "the", "task", "distribution." ]
[ 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2638
[ "Thus,", "we", "observe", "in", "Table", "3", "that", "EmbedHalluc", "is", "overall", "superior", "to", "EDA." ]
[ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1 ]
2639
[ "Since", "it", "operates", "in", "the", "continuous", "embedding", "space,", "EmbedHalluc", "hallucinates", "diverse", "embeddings", "that", "follow", "the", "distribution", "of", "few-shot", "set." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2640
[ "EDA", "either", "greatly", "change", "the", "sentence", "with", "a", "large", "α", "or", "fails", "to", "introduce", "substantial", "variations", "(which", "is", "crucial", "in", "the", "extreme", "low", "data", "setting)", "of", "inputs", "with", "a", "small", "α." ]
[ 1, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3 ]
2641
[ "EDA", "edits", "the", "input", "sentences", "by", "applying", "synonym", "replacement,", "random", "swap,", "random", "deletion", "and", "random", "insertion", "for", "a", "default", "10%", "(α)", "of", "tokens." ]
[ 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 3, 0, 0 ]
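A toy sketch of the EDA operations listed above (the official implementation draws synonyms from WordNet; the `synonyms` dict here is a hypothetical stand-in):

```python
import random

def eda_light(tokens, alpha=0.1, synonyms=None):
    """Apply roughly alpha * len(tokens) random EDA-style edits: synonym
    replacement, random swap, random deletion, random insertion."""
    tokens, synonyms = tokens[:], synonyms or {}
    for _ in range(max(1, int(alpha * len(tokens)))):
        op = random.choice(["replace", "swap", "delete", "insert"])
        i = random.randrange(len(tokens))
        if op == "replace" and tokens[i] in synonyms:
            tokens[i] = random.choice(synonyms[tokens[i]])
        elif op == "swap" and len(tokens) > 1:
            j = random.randrange(len(tokens))
            tokens[i], tokens[j] = tokens[j], tokens[i]
        elif op == "delete" and len(tokens) > 1:
            del tokens[i]
        elif op == "insert" and tokens[i] in synonyms:
            tokens.insert(i, random.choice(synonyms[tokens[i]]))
    return tokens
```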
2642
[ "We", "apply", "pseudo-labeling", "(Cascante-Bonilla", "et", "al.,", "2021)", "for", "SSL,", "i.e.,", "we", "first", "fine-tune", "the", "model", "with", "the", "few-shot", "training", "set", "and", "use", "the", "fine-tuned", "model", "to", "pseudo-label", "the", "unlabeled", "data,", "finally", "we", "finetune", "the", "model", "again", "with", "the", "few-shot", "training", "set", "combined", "with", "the", "pseudo-labeled", "set." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
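The three-step pseudo-labeling recipe above, as a compact sketch; `finetune(model, pairs)` and `predict(model, text)` are hypothetical helpers standing in for full training and inference loops:

```python
def pseudo_label_ssl(model, finetune, predict, fewshot_set, unlabeled_texts):
    """SSL via pseudo-labeling (Cascante-Bonilla et al., 2021)."""
    model = finetune(model, fewshot_set)                         # 1) few-shot fine-tune
    pseudo = [(x, predict(model, x)) for x in unlabeled_texts]   # 2) pseudo-label
    return finetune(model, fewshot_set + pseudo)                 # 3) fine-tune on the union
```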
2643
[ "We", "also", "consider", "semi-supervised", "learning", "(SSL)", "which", "relies", "on", "unlabeled", "data", "(64", "examples", "per", "class", "in", "our", "experiments)." ]
[ 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2644
[ "Since", "our", "method", "is", "a", "generative", "data", "augmentation", "(DA)", "method,", "we", "compare", "it", "to", "another", "DA", "method", "EDA." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 ]
2645
[ "4.5", "Comparing", "to", "EDA", "and", "SSL" ]
[ 0, 0, 0, 1, 0, 1 ]
2646
[ "Whereas,", "in", "conventional", "fine-tuning,", "the", "[CLS]", "token", "is", "always", "appended", "to", "the", "beginning", "of", "shalluc", "and", "the", "classification", "is", "performed", "at", "the", "[CLS]", "token." ]
[ 0, 0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
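In tensor terms, the [CLS] handling above amounts to prepending one embedding row; a minimal sketch with the L x 1024 shapes used later in this section:

```python
import torch

def prepend_cls(s_halluc, cls_embed):
    """Prepend the [CLS] token embedding to a hallucinated sequence embedding
    so classification can be read off the first position.
    s_halluc: (L, 1024), cls_embed: (1024,) -> (L + 1, 1024)."""
    return torch.cat([cls_embed.unsqueeze(0), s_halluc], dim=0)
```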
2647
[ "randomness", "in", "the", "learning", "process", "since", "we", "have", "to", "insert", "[mask]", "token", "to", "a", "random", "position", "in", "the", "hallucinated", "embedding", "shalluc,", "for", "the", "calculation", "of", "the", "loss." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2648
[ "When", "applying", "to", "prompt-based", "fine-tuning,", "while", "our", "method", "under-performs", "in", "MNLI,", "MNLI-mm", "and", "RTE,", "it", "outperforms", "for", "all", "other", "tasks,", "with", "substantial", "improvements", "over", "the", "baseline", "in", "CoLA,", "TREC,", "QNLI,", "MRPC." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 13, 0, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 13, 13, 13 ]
2649
[ "When", "combining", "with", "LabelCalib,", "our", "method", "outperforms", "in", "all", "tasks." ]
[ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0 ]
2650
[ "In", "conventional", "fine-tuning,", "EmbedHalluc", "imthe", "baseline", "in", "14", "tasks,", "only", "proves", "over", "marginally", "under-performs", "in", "SST-5", "(40.3", "vs.", "40.6", "of", "baseline)." ]
[ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 9, 0, 9, 0, 0 ]
2651
[ "Our", "Label", "Calibration", "(LabelCalib)", "can", "further", "improve", "the", "results." ]
[ 0, 1, 2, 1, 0, 0, 0, 0, 0 ]
2652
[ "Results", "for", "BERT-large-cased", "can", "be", "found", "in", "Appendix", "B." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2653
[ "We", "compare", "our", "method", "EmbedHalluc", "(w/o", "or", "w/", "LabelCalib)", "using", "RoBERTa-large", "on", "15", "tasks", "with", "two", "fine-tuning", "methods:", "conventional", "(Table", "1)", "and", "prompt-based", "fine-tuning", "(Table", "2)." ]
[ 0, 0, 0, 0, 1, 0, 0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 2, 0, 0 ]
2654
[ "4.4", "Main", "Results", "on", "15", "Tasks" ]
[ 0, 0, 0, 0, 0, 0 ]
2655
[ "The", "algorithm", "is", "implemented", "in", "PyTorch-1.10", "and", "experiments", "are", "conducted", "on", "Nvidia", "RTX-6000", "and", "RTX-A6000", "GPU." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2656
[ "Finally,", "results", "are", "reported", "by", "testing", "the", "models", "on", "the", "testing", "dataset." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2657
[ "The", "models", "are", "selected", "based", "on", "the", "validation", "accuracy", "every", "100", "steps." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2658
[ "We", "use", "1e−", "the", "same", "search", "for", "EDA", "(Wei", "and", "Zou,", "2019)", "and", "semi-supervised", "pseduo-labeling", "(SSL)", "when", "learning", "with", "additional", "augmented", "or", "pseudo-labeled", "data." ]
[ 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0 ]
2659
[ "Our", "method", "learns", "from", "hallucinated", "embeddings", "with", "a", "grid", "search", "of", "learning", "rate", "of", "6,", "and", "batch", "size", "of", "4,", "6,", "8." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 4, 0, 5, 0, 3, 4, 0, 5, 5, 5 ]
2660
[ "To", "fairly", "compare", "our", "method", "with", "baselines", "and", "other", "methods,", "when", "learning", "with", "real", "sentences,", "5", "(further", "juswe", "use", "the", "same", "learning", "rate", "of", "1e−", "tification", "of", "using", "this", "learning", "rate", "can", "be", "found", "in", "Appendix", "D)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2661
[ "sampling", "the", "hallucinators", "(see", "Algorithm", "1)." ]
[ 0, 0, 0, 0, 0, 0 ]
2662
[ "We", "draw", "two", "mini-batches", "during", "the", "training", "of", "our", "few-shot", "language", "learners,", "i.e.,", "one", "from", "the", "real", "language", "few-shot", "training", "set,", "another", "one", "by" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
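A sketch of one such training step with the two mini-batches; `lm` is assumed to map embeddings to class logits, and plain cross-entropy stands in for the KL loss on calibrated soft labels described later in this section:

```python
import torch
import torch.nn.functional as F

def fewshot_step(lm, hallucinator, embeds, labels, optimizer, z_dim=100):
    """One update from a real mini-batch plus a hallucinated mini-batch."""
    loss_real = F.cross_entropy(lm(embeds), labels)

    z = torch.randn(labels.size(0), z_dim)           # noise for the hallucinator
    s_halluc = hallucinator(z, labels).detach()      # condition on class labels
    loss_halluc = F.cross_entropy(lm(s_halluc), labels)

    loss = loss_real + loss_halluc                   # Lreal + Lhalluc
    optimizer.zero_grad(); loss.backward(); optimizer.step()
    return loss.item()
```

In the actual recipe the two losses use separate learning rates (1e−5 and 1e−6 above); a single optimizer is used here purely to keep the sketch short.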
2663
[ "4.3", "Training", "Details", "for", "Few-Shot", "Language" ]
[ 0, 0, 0, 0, 0, 0 ]
2664
[ "We", "apply", "gradient", "penalty", "with", "weight", "of", "loss", "100", "for", "training", "the", "cWGAN." ]
[ 0, 0, 3, 4, 0, 0, 0, 0, 5, 0, 0, 0, 0 ]
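The standard WGAN-GP penalty (Gulrajani et al., 2017), sketched with the loss weight of 100 stated above; `disc(x, y)` is the conditional critic:

```python
import torch

def gradient_penalty(disc, real, fake, labels, gp_weight=100.0):
    """Penalize the critic's gradient norm on random real/fake interpolates."""
    alpha = torch.rand(real.size(0), *([1] * (real.dim() - 1)), device=real.device)
    mix = (alpha * real + (1 - alpha) * fake).detach().requires_grad_(True)
    grad, = torch.autograd.grad(disc(mix, labels).sum(), mix, create_graph=True)
    return gp_weight * ((grad.flatten(1).norm(2, dim=1) - 1) ** 2).mean()
```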
2665
[ "The", "real", "embeddings", "are", "collected", "from", "the", "language", "few-shot", "training", "set", "by", "passing", "text", "into", "the", "embedding", "layer", "of", "the", "language", "model." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
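Collecting these real embeddings takes one pass through the embedding layer; a minimal Huggingface sketch (the example sentence is a placeholder for the few-shot set):

```python
import torch
from transformers import AutoModelForMaskedLM, AutoTokenizer

tok = AutoTokenizer.from_pretrained("roberta-large")
model = AutoModelForMaskedLM.from_pretrained("roberta-large")

texts = ["a few-shot training sentence"]   # stand-in for the real few-shot set
ids = tok(texts, return_tensors="pt", padding=True).input_ids
with torch.no_grad():
    real_embeds = model.get_input_embeddings()(ids)   # (batch, seq_len, 1024)
```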
2666
[ "We", "train", "the", "Embedding", "Hallucinators", "for", "150", "epochs", "using", "a", "batch", "size", "of", "64,", "the", "Adam", "optimizer", "(β", "=", "(0.5,", "0.999)),", "and", "a", "learning", "rate", "of", "0.0002." ]
[ 0, 0, 0, 0, 0, 0, 5, 3, 0, 0, 3, 4, 0, 5, 0, 0, 0, 3, 0, 5, 5, 0, 0, 3, 4, 0, 5 ]
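Putting the stated recipe together, a sketch of the hallucinator training loop; `G(z, y)` and `D(x, y)` are the conditional generator and critic (architectures sketched below), and `gradient_penalty` is the helper sketched earlier:

```python
import torch

def train_hallucinator(G, D, real_embeds, labels, epochs=150, batch_size=64):
    """cWGAN training with Adam(lr=2e-4, betas=(0.5, 0.999)), 150 epochs,
    batch size 64, matching the description above."""
    opt_g = torch.optim.Adam(G.parameters(), lr=2e-4, betas=(0.5, 0.999))
    opt_d = torch.optim.Adam(D.parameters(), lr=2e-4, betas=(0.5, 0.999))
    for _ in range(epochs):
        for i in range(0, len(real_embeds), batch_size):
            x, y = real_embeds[i:i + batch_size], labels[i:i + batch_size]
            z = torch.randn(len(x), 100)
            fake = G(z, y)
            # Critic: minimize D(fake) - D(real) + gradient penalty.
            d_loss = (D(fake.detach(), y).mean() - D(x, y).mean()
                      + gradient_penalty(D, x, fake.detach(), y))
            opt_d.zero_grad(); d_loss.backward(); opt_d.step()
            # Generator: maximize the critic's score on fakes.
            g_loss = -D(G(z, y), y).mean()
            opt_g.zero_grad(); g_loss.backward(); opt_g.step()
    return G
```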
2667
[ "The", "discriminator", "is", "a", "3blocks", "model,", "each", "bock", "having", "a", "sequence", "of", "FullyConnect-BatchNorm-LeakyReLU", "with", "the", "same", "hidden", "dimension", "of", "512." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 4, 0, 5 ]
2668
[ "L", "is", "set", "to", "be", "128." ]
[ 3, 0, 0, 0, 0, 5 ]
2669
[ "The", "hallucinated", "embeddings,", "i.e.,", "outputs", "of", "the", "generator", "are", "tensors", "of", "L", "1024,", "where", "the", "length", "of", "the", "generated", "embeddings" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 5, 0, 0, 0, 0, 0, 0, 0 ]
2670
[ "The", "hidden", "dimensions", "of", "the", "generator", "are", "128,", "256,", "512,", "1024." ]
[ 0, 3, 4, 0, 0, 0, 0, 5, 5, 5, 5 ]
2671
[ "The", "generator", "is", "a", "4-blocks", "model,", "with", "each", "block", "containing", "a", "FullyConnect", "layer", "followed", "by", "a", "BatchNorm", "and", "LeakyReLU." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
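The generator and discriminator rows above, as PyTorch modules. The final projection layers and the label-embedding conditioning are assumptions (the rows specify only the block structure, hidden dimensions, and the L x 1024 output):

```python
import torch
import torch.nn as nn

L, EMB = 128, 1024  # hallucinated embeddings are L x 1024 tensors, L = 128

def block(n_in, n_out):
    # FullyConnect -> BatchNorm -> LeakyReLU, as described above.
    return nn.Sequential(nn.Linear(n_in, n_out), nn.BatchNorm1d(n_out), nn.LeakyReLU(0.2))

class Generator(nn.Module):
    """4-block generator with hidden dimensions 128, 256, 512, 1024."""
    def __init__(self, z_dim=100, n_classes=2):
        super().__init__()
        self.label_emb = nn.Embedding(n_classes, z_dim)
        self.net = nn.Sequential(block(2 * z_dim, 128), block(128, 256),
                                 block(256, 512), block(512, 1024),
                                 nn.Linear(1024, L * EMB))
    def forward(self, z, y):
        h = torch.cat([z, self.label_emb(y)], dim=1)   # condition on the label
        return self.net(h).view(-1, L, EMB)

class Discriminator(nn.Module):
    """3-block critic with a shared hidden dimension of 512."""
    def __init__(self, z_dim=100, n_classes=2):
        super().__init__()
        self.label_emb = nn.Embedding(n_classes, z_dim)
        self.net = nn.Sequential(block(L * EMB + z_dim, 512), block(512, 512),
                                 block(512, 512), nn.Linear(512, 1))
    def forward(self, x, y):
        return self.net(torch.cat([x.flatten(1), self.label_emb(y)], dim=1))
```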
2672
[ "The", "training", "of", "Embedding", "Hallucinators", "involves", "training", "a", "generator", "and", "discriminator", "in", "the", "cWGAN", "framework." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2673
[ "4.2", "Training", "Details", "for", "Embedding" ]
[ 0, 0, 0, 0, 0 ]
2674
[ "We", "sample", "16", "examples", "per", "class", "to", "form", "a", "training", "set", "and", "construct", "a", "validation", "set", "with", "the", "same", "size", "as", "the", "training", "set." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2675
[ "The", "evaluations", "are", "conducted", "by", "averaging", "results", "on", "5", "different", "train", "test", "splits." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2676
[ "We", "evaluate", "our", "method", "on", "15", "classification", "tasks." ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
2677
[ "4.1", "Evaluation", "Datasets", "and", "Protocol" ]
[ 0, 0, 0, 0, 0 ]
2678
[ "4", "Experiments" ]
[ 0, 0 ]
2679
[ "Thus,", "our", "method", "has", "about", "×", "computational", "overhead", "compared", "to", "the", "baselines." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2680
[ "Computing", "total", "loss", "one", "additional", "forward", "pass", "of", "the", "hallucinator", "and", "one", "more", "forward", "pass", "and", "backward", "pass", "of", "the", "2", "language", "model." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2681
[ "Note", "that", "baselines", "considered", "in", "this", "paper", "use", "Lhalluc", "requires", "Lreal." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2682
[ "The", "pseudo-code", "for", "finetuning", "of", "few-shot", "language", "learners", "with", "hallucinated", "embeddings", "is", "shown", "in", "Algorithm", "1." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2683
[ "Finally,", "the", "language", "model", "learns", "from", "the", "hallucinated", "emM", "bedding", "by", "KL-divergence" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2684
[ "The", "soft-label", "of", "the", "embedding", "shalluc(ci)", "is", "then", "cpseudo,i", "=", "FGEN0(shalluc(ci))." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2685
[ "We", "propose", "Label", "Calibration", "(LabelCalib)", "by", "pseudoFGEN0", "(LM1", "in", "labeling", "from", "a", "teacher", "model", "Algorithm", "1),", "where", "FGEN0", "is", "first", "fine-tuned", "on", "the", "original", "training", "set", "(without", "augmentation)." ]
[ 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
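A sketch of Label Calibration plus the KL-divergence objective from the rows above; both models are assumed to map embeddings to class logits, and the interface names are illustrative:

```python
import torch
import torch.nn.functional as F

def label_calibrated_kl(learner_lm, teacher_lm, s_halluc):
    """The teacher (fine-tuned on the original few-shot set) soft-labels the
    hallucinated embedding; the learner matches it under KL-divergence."""
    with torch.no_grad():
        c_pseudo = F.softmax(teacher_lm(s_halluc), dim=-1)   # calibrated soft label
    log_q = F.log_softmax(learner_lm(s_halluc), dim=-1)
    return F.kl_div(log_q, c_pseudo, reduction="batchmean")
```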
2686
[ "However,", "this", "hard", "label", "may", "not", "best", "represent", "the", "class", "information", "of", "the", "hallucinated", "embedding." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2687
[ "For", "a", "single", "input", "sentence,", "we", "first", "pass", "it", "through", "the", "embedding", "layer", "to", "get", "the", "sentence", "embedding", "ssent." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2688
[ "3.2", "Fine-tuning", "with", "Hallucinated", "Embedding" ]
[ 0, 0, 0, 0, 0 ]
2689
[ "The", "hallucinated", "embeddings", "shalluc,", "in", "principal,", "are", "indiscriminative", "to", "the", "embeddings", "of", "observed", "examples", "in", "that", "class." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2690
[ "After", "the", "training,", "we", "use", "it", "to", "generate", "pseudo-embeddings", "of", "examples", "by", "feeding", "it", "with", "random", "noisy", "vectors", "z", "sampled", "from", "(0,", "1)", "and", "the", "corresponding", "condition", "class", "laN", "bels", "ci." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
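The generation step in code form, a minimal sketch reusing the conditional generator interface from the sketches above:

```python
import torch

def hallucinate(G, class_labels, z_dim=100):
    """Pseudo-embeddings from noise z ~ N(0, 1) plus condition class labels."""
    z = torch.randn(len(class_labels), z_dim)
    with torch.no_grad():
        return G(z, class_labels)   # (batch, L, 1024) hallucinated embeddings
```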
2691
[ "Our", "hallucinator", "is", "trained", "under", "the", "conditional", "WGAN", "framework." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2692
[ "(Arjovsky", "et", "al.,", "2017)", "uses", "the", "Wasserstein", "distance", "as", "the", "objective", "function", "to", "stabilize", "the", "training", "of", "GAN." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2693
[ "GAN", "(Goodfellow", "et", "al.,", "2014)", "has", "led", "the", "revolution", "of", "generative", "models", "to", "achieve", "impressive", "results", "in", "synthesizing", "images", "(Zhu", "et", "al.,", "2017)", "and", "higher", "dimensional", "data", "(Wang", "et", "al.,", "2020)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2694
[ "3.1", "Conditional", "Wasserstein", "GAN" ]
[ 0, 0, 0, 0 ]
2695
[ "3", "Method" ]
[ 0, 0 ]
2696
[ "Also,", "different", "from", "FDA", "which", "is", "focused", "on", "two", "intent", "classification", "tasks,", "our", "method", "can", "be", "applied", "to", "a", "wide-range", "of", "NLP", "task", "as", "shown", "by", "our", "experiments", "on", "15", "diverse", "tasks." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2697
[ "Our", "method", "shares", "similarity", "to", "FDA", "(Kumar", "et", "al.,", "2019),", "which", "is", "also", "a", "generative", "data", "augmentation", "method,", "but", "at", "the", "feature", "space." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2698
[ "Different", "from", "(Wei", "et", "al.,", "2021)", "which", "uses", "EDA", "(Wei", "and", "Zou,", "2019)", "to", "augment", "examples", "at", "the", "discrete", "input" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
2699
[ "Our", "method", "is", "a", "generative", "data", "augmentation", "method", "in", "the", "embedding", "space." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]