@inproceedings{10b0f582547b45c8bf6fc9489260edfc,
title = "Contrast is all you need",
abstract = "In this study, we analyze data-scarce classification scenarios where the available labeled legal data is small and imbalanced, potentially hurting the quality of the results. We focus on two finetuning objectives: SetFit (Sentence Transformer Finetuning), a contrastive learning setup, and a vanilla finetuning setup, on a legal provision classification task. Additionally, we compare the features extracted with LIME (Local Interpretable Model-agnostic Explanations) to see which particular features contributed to the model{\textquoteright}s classification decisions. The results show that the contrastive setup with SetFit performed better than vanilla finetuning while using only a fraction of the training samples. The LIME results show that the contrastive learning approach helps boost both positive and negative features that are legally informative and contribute to the classification results. Thus, a model finetuned with a contrastive objective seems to base its decisions more confidently on legally informative features.",
keywords = "LegalNLP, Contrastive Learning, NLP, Explainable AI",
author = "Burak Kilic and Floris Bex and Albert Gatt",
year = "2023",
language = "English",
series = "CEUR Workshop Proceedings",
publisher = "CEUR",
pages = "72--82",
editor = "Francesca Lagioia and Jack Mumford and Daphne Odekerken and Hannes Westermann",
booktitle = "ASAIL 2023 - Automated Semantic Analysis of Information in Legal Text",
}