{"id":"https://openalex.org/W4415238256","doi":"https://doi.org/10.48550/arxiv.2508.14130","title":"EmoSLLM: Parameter-Efficient Adaptation of LLMs for Speech Emotion Recognition","display_name":"EmoSLLM: Parameter-Efficient Adaptation of LLMs for Speech Emotion Recognition","publication_year":2025,"publication_date":"2025-08-19","ids":{"openalex":"https://openalex.org/W4415238256","doi":"https://doi.org/10.48550/arxiv.2508.14130"},"language":"en","primary_location":{"id":"pmh:oai:arXiv.org:2508.14130","is_oa":true,"landing_page_url":"http://arxiv.org/abs/2508.14130","pdf_url":"https://arxiv.org/pdf/2508.14130","source":{"id":"https://openalex.org/S4393918464","display_name":"ArXiv.org","issn_l":"2331-8422","issn":["2331-8422"],"is_oa":true,"is_in_doaj":false,"is_core":false,"host_organization":null,"host_organization_name":null,"host_organization_lineage":[],"host_organization_lineage_names":[],"type":"repository"},"license":null,"license_id":null,"version":"submittedVersion","is_accepted":false,"is_published":false,"raw_source_name":null,"raw_type":"text"},"type":"preprint","indexed_in":["arxiv","datacite"],"open_access":{"is_oa":true,"oa_status":"green","oa_url":"https://arxiv.org/pdf/2508.14130","any_repository_has_fulltext":true},"authorships":[{"author_position":"first","author":{"id":"https://openalex.org/A5120021285","display_name":"Hugo Thimonier","orcid":null},"institutions":[],"countries":[],"is_corresponding":true,"raw_author_name":"Thimonier, Hugo","raw_affiliation_strings":[],"affiliations":[]},{"author_position":"middle","author":{"id":"https://openalex.org/A5094178949","display_name":"Antony Perzo","orcid":null},"institutions":[],"countries":[],"is_corresponding":false,"raw_author_name":"Perzo, Antony","raw_affiliation_strings":[],"affiliations":[]},{"author_position":"last","author":{"id":"https://openalex.org/A5040347439","display_name":"Renaud S\u00e9guier","orcid":"https://orcid.org/0000-0001-7199-7563"},"institutions":[],"countries":[],"is_corresponding":false,"raw_author_name":"Seguier, Renaud","raw_affiliation_strings":[],"affiliations":[]}],"institutions":[],"countries_distinct_count":0,"institutions_distinct_count":3,"corresponding_author_ids":["https://openalex.org/A5120021285"],"corresponding_institution_ids":[],"apc_list":null,"apc_paid":null,"fwci":null,"has_fulltext":false,"cited_by_count":0,"citation_normalized_percentile":null,"cited_by_percentile_year":null,"biblio":{"volume":null,"issue":null,"first_page":null,"last_page":null},"is_retracted":false,"is_paratext":false,"is_xpac":false,"primary_topic":{"id":"https://openalex.org/T10201","display_name":"Speech Recognition and Synthesis","score":0.9650999903678894,"subfield":{"id":"https://openalex.org/subfields/1702","display_name":"Artificial Intelligence"},"field":{"id":"https://openalex.org/fields/17","display_name":"Computer Science"},"domain":{"id":"https://openalex.org/domains/3","display_name":"Physical Sciences"}},"topics":[{"id":"https://openalex.org/T10201","display_name":"Speech Recognition and Synthesis","score":0.9650999903678894,"subfield":{"id":"https://openalex.org/subfields/1702","display_name":"Artificial Intelligence"},"field":{"id":"https://openalex.org/fields/17","display_name":"Computer Science"},"domain":{"id":"https://openalex.org/domains/3","display_name":"Physical Sciences"}},{"id":"https://openalex.org/T10860","display_name":"Speech and Audio Processing","score":0.9574999809265137,"subfield":{"id":"https://openalex.org/subfields/1711","display_name":"Signal Processing"},"field":{"id":"https://openalex.org/fields/17","display_name":"Computer Science"},"domain":{"id":"https://openalex.org/domains/3","display_name":"Physical Sciences"}},{"id":"https://openalex.org/T10667","display_name":"Emotion and Mood Recognition","score":0.934499979019165,"subfield":{"id":"https://openalex.org/subfields/3205","display_name":"Experimental and Cognitive Psychology"},"field":{"id":"https://openalex.org/fields/32","display_name":"Psychology"},"domain":{"id":"https://openalex.org/domains/2","display_name":"Social Sciences"}}],"keywords":[{"id":"https://openalex.org/keywords/paralanguage","display_name":"Paralanguage","score":0.6777999997138977},{"id":"https://openalex.org/keywords/task","display_name":"Task (project management)","score":0.5374000072479248},{"id":"https://openalex.org/keywords/adaptation","display_name":"Adaptation (eye)","score":0.5248000025749207},{"id":"https://openalex.org/keywords/emotion-recognition","display_name":"Emotion recognition","score":0.5029000043869019},{"id":"https://openalex.org/keywords/representation","display_name":"Representation (politics)","score":0.4779999852180481},{"id":"https://openalex.org/keywords/natural-language","display_name":"Natural language","score":0.4702000021934509},{"id":"https://openalex.org/keywords/modalities","display_name":"Modalities","score":0.4424999952316284},{"id":"https://openalex.org/keywords/natural","display_name":"Natural (archaeology)","score":0.4262000024318695},{"id":"https://openalex.org/keywords/ambiguity","display_name":"Ambiguity","score":0.41110000014305115}],"concepts":[{"id":"https://openalex.org/C133378560","wikidata":"https://www.wikidata.org/wiki/Q1753225","display_name":"Paralanguage","level":2,"score":0.6777999997138977},{"id":"https://openalex.org/C41008148","wikidata":"https://www.wikidata.org/wiki/Q21198","display_name":"Computer science","level":0,"score":0.6507999897003174},{"id":"https://openalex.org/C2780451532","wikidata":"https://www.wikidata.org/wiki/Q759676","display_name":"Task (project management)","level":2,"score":0.5374000072479248},{"id":"https://openalex.org/C139807058","wikidata":"https://www.wikidata.org/wiki/Q352374","display_name":"Adaptation (eye)","level":2,"score":0.5248000025749207},{"id":"https://openalex.org/C2777438025","wikidata":"https://www.wikidata.org/wiki/Q1339090","display_name":"Emotion recognition","level":2,"score":0.5029000043869019},{"id":"https://openalex.org/C2776359362","wikidata":"https://www.wikidata.org/wiki/Q2145286","display_name":"Representation (politics)","level":3,"score":0.4779999852180481},{"id":"https://openalex.org/C195324797","wikidata":"https://www.wikidata.org/wiki/Q33742","display_name":"Natural language","level":2,"score":0.4702000021934509},{"id":"https://openalex.org/C204321447","wikidata":"https://www.wikidata.org/wiki/Q30642","display_name":"Natural language processing","level":1,"score":0.4553000032901764},{"id":"https://openalex.org/C2779903281","wikidata":"https://www.wikidata.org/wiki/Q6888026","display_name":"Modalities","level":2,"score":0.4424999952316284},{"id":"https://openalex.org/C2776608160","wikidata":"https://www.wikidata.org/wiki/Q4785462","display_name":"Natural (archaeology)","level":2,"score":0.4262000024318695},{"id":"https://openalex.org/C28490314","wikidata":"https://www.wikidata.org/wiki/Q189436","display_name":"Speech recognition","level":1,"score":0.41260001063346863},{"id":"https://openalex.org/C2780522230","wikidata":"https://www.wikidata.org/wiki/Q1140419","display_name":"Ambiguity","level":2,"score":0.41110000014305115},{"id":"https://openalex.org/C2776401178","wikidata":"https://www.wikidata.org/wiki/Q12050496","display_name":"Feature (linguistics)","level":2,"score":0.38530001044273376},{"id":"https://openalex.org/C2779439875","wikidata":"https://www.wikidata.org/wiki/Q1078276","display_name":"Natural language understanding","level":3,"score":0.3840000033378601},{"id":"https://openalex.org/C180747234","wikidata":"https://www.wikidata.org/wiki/Q23373","display_name":"Cognitive psychology","level":1,"score":0.38359999656677246},{"id":"https://openalex.org/C154945302","wikidata":"https://www.wikidata.org/wiki/Q11660","display_name":"Artificial intelligence","level":1,"score":0.34139999747276306},{"id":"https://openalex.org/C2778572836","wikidata":"https://www.wikidata.org/wiki/Q380933","display_name":"Space (punctuation)","level":2,"score":0.33959999680519104},{"id":"https://openalex.org/C177148314","wikidata":"https://www.wikidata.org/wiki/Q170084","display_name":"Generalization","level":2,"score":0.33869999647140503},{"id":"https://openalex.org/C81917197","wikidata":"https://www.wikidata.org/wiki/Q628760","display_name":"Selection (genetic algorithm)","level":2,"score":0.3208000063896179},{"id":"https://openalex.org/C137293760","wikidata":"https://www.wikidata.org/wiki/Q3621696","display_name":"Language model","level":2,"score":0.319599986076355},{"id":"https://openalex.org/C2776303644","wikidata":"https://www.wikidata.org/wiki/Q1020499","display_name":"Interfacing","level":2,"score":0.3156000077724457},{"id":"https://openalex.org/C6438553","wikidata":"https://www.wikidata.org/wiki/Q1185804","display_name":"Affective computing","level":2,"score":0.3075000047683716},{"id":"https://openalex.org/C2776445246","wikidata":"https://www.wikidata.org/wiki/Q1792644","display_name":"Style (visual arts)","level":2,"score":0.301800012588501},{"id":"https://openalex.org/C26517878","wikidata":"https://www.wikidata.org/wiki/Q228039","display_name":"Key (lock)","level":2,"score":0.29670000076293945},{"id":"https://openalex.org/C15744967","wikidata":"https://www.wikidata.org/wiki/Q9418","display_name":"Psychology","level":0,"score":0.2865000069141388},{"id":"https://openalex.org/C77618280","wikidata":"https://www.wikidata.org/wiki/Q1155772","display_name":"Scheme (mathematics)","level":2,"score":0.28380000591278076},{"id":"https://openalex.org/C175154964","wikidata":"https://www.wikidata.org/wiki/Q380077","display_name":"Task analysis","level":3,"score":0.27869999408721924},{"id":"https://openalex.org/C2776515129","wikidata":"https://www.wikidata.org/wiki/Q7306218","display_name":"Redress","level":2,"score":0.2732999920845032},{"id":"https://openalex.org/C206310091","wikidata":"https://www.wikidata.org/wiki/Q750859","display_name":"Emotion classification","level":2,"score":0.2685999870300293},{"id":"https://openalex.org/C151989614","wikidata":"https://www.wikidata.org/wiki/Q440370","display_name":"Mel-frequency cepstrum","level":3,"score":0.2572000026702881},{"id":"https://openalex.org/C41895202","wikidata":"https://www.wikidata.org/wiki/Q8162","display_name":"Linguistics","level":1,"score":0.25609999895095825},{"id":"https://openalex.org/C135641252","wikidata":"https://www.wikidata.org/wiki/Q738567","display_name":"Multimodal interaction","level":2,"score":0.2540000081062317},{"id":"https://openalex.org/C23224414","wikidata":"https://www.wikidata.org/wiki/Q176769","display_name":"Hidden Markov model","level":2,"score":0.2535000145435333}],"mesh":[],"locations_count":2,"locations":[{"id":"pmh:oai:arXiv.org:2508.14130","is_oa":true,"landing_page_url":"http://arxiv.org/abs/2508.14130","pdf_url":"https://arxiv.org/pdf/2508.14130","source":{"id":"https://openalex.org/S4393918464","display_name":"ArXiv.org","issn_l":"2331-8422","issn":["2331-8422"],"is_oa":true,"is_in_doaj":false,"is_core":false,"host_organization":null,"host_organization_name":null,"host_organization_lineage":[],"host_organization_lineage_names":[],"type":"repository"},"license":null,"license_id":null,"version":"submittedVersion","is_accepted":false,"is_published":false,"raw_source_name":null,"raw_type":"text"},{"id":"doi:10.48550/arxiv.2508.14130","is_oa":true,"landing_page_url":"https://doi.org/10.48550/arxiv.2508.14130","pdf_url":null,"source":{"id":"https://openalex.org/S4306400194","display_name":"arXiv (Cornell University)","issn_l":null,"issn":null,"is_oa":true,"is_in_doaj":false,"is_core":false,"host_organization":"https://openalex.org/I205783295","host_organization_name":"Cornell University","host_organization_lineage":["https://openalex.org/I205783295"],"host_organization_lineage_names":[],"type":"repository"},"license":"cc-by","license_id":"https://openalex.org/licenses/cc-by","version":null,"is_accepted":false,"is_published":null,"raw_source_name":null,"raw_type":"article"}],"best_oa_location":{"id":"pmh:oai:arXiv.org:2508.14130","is_oa":true,"landing_page_url":"http://arxiv.org/abs/2508.14130","pdf_url":"https://arxiv.org/pdf/2508.14130","source":{"id":"https://openalex.org/S4393918464","display_name":"ArXiv.org","issn_l":"2331-8422","issn":["2331-8422"],"is_oa":true,"is_in_doaj":false,"is_core":false,"host_organization":null,"host_organization_name":null,"host_organization_lineage":[],"host_organization_lineage_names":[],"type":"repository"},"license":null,"license_id":null,"version":"submittedVersion","is_accepted":false,"is_published":false,"raw_source_name":null,"raw_type":"text"},"sustainable_development_goals":[],"awards":[],"funders":[],"has_content":{"grobid_xml":false,"pdf":false},"content_urls":null,"referenced_works_count":0,"referenced_works":[],"related_works":[],"abstract_inverted_index":{"Emotion":[0],"recognition":[1,164],"from":[2],"speech":[3],"is":[4],"a":[5,70,106,134],"challenging":[6],"task":[7],"that":[8,73,167],"requires":[9],"capturing":[10],"both":[11],"linguistic":[12],"and":[13,22,63,79,132],"paralinguistic":[14],"cues,":[15],"with":[16,55,77],"critical":[17],"applications":[18],"in":[19,123,177,195],"human-computer":[20],"interaction":[21],"mental":[23],"health":[24],"monitoring.":[25],"Recent":[26],"works":[27],"have":[28,51],"highlighted":[29],"the":[30,42,101,116,124,130,138,145,178,185],"ability":[31],"of":[32,41,126,187],"Large":[33],"Language":[34],"Models":[35],"(LLMs)":[36],"to":[37,147],"perform":[38],"tasks":[39],"outside":[40]
,"sole":[43],"natural":[44,127],"language":[45,128],"area.":[46],"In":[47],"particular,":[48],"recent":[49],"approaches":[50],"investigated":[52],"coupling":[53],"LLMs":[54,176],"other":[56],"data":[57],"modalities":[58],"by":[59],"using":[60,91],"pre-trained":[61],"backbones":[62],"different":[64],"fusion":[65],"mechanisms.":[66],"This":[67,190],"work":[68],"proposes":[69],"novel":[71],"approach":[72],"fine-tunes":[74],"an":[75,92],"LLM":[76,111,146],"audio":[78,89,93,118],"text":[80],"representations":[81],"for":[82,199],"emotion":[83,139,163,201],"prediction.":[84],"Our":[85],"method":[86],"first":[87],"extracts":[88],"features":[90,122],"feature":[94],"extractor,":[95],"which":[96],"are":[97],"then":[98],"mapped":[99],"into":[100],"LLM's":[102],"representation":[103],"space":[104],"via":[105],"learnable":[107],"interfacing":[108],"module.":[109],"The":[110],"takes":[112],"as":[113],"input":[114],"(1)":[115],"transformed":[117],"features,":[119],"(2)":[120],"additional":[121],"form":[125],"(e.g.,":[129],"transcript),":[131],"(3)":[133],"textual":[135],"prompt":[136],"describing":[137],"prediction":[140],"task.":[141],"To":[142],"efficiently":[143],"adapt":[144],"this":[148],"multimodal":[149],"task,":[150],"we":[151],"employ":[152],"Low-Rank":[153],"Adaptation":[154],"(LoRA),":[155],"enabling":[156],"parameter-efficient":[157],"fine-tuning.":[158],"Experimental":[159],"results":[160],"on":[161],"standard":[162],"benchmarks":[165],"demonstrate":[166],"our":[168,192],"model":[169],"outperforms":[170],"all":[171],"but":[172],"one":[173],"existing":[174],"Speech-Text":[175],"literature,":[179],"while":[180,203],"requiring":[181],"less":[182],"than":[183],"half":[184],"parameters":[186],"competing":[188],"approaches.":[189],"highlights":[191],"approach's":[193],"effectiveness":[194],"integrating":[196],"multi-modal":[197],"inputs":[198],"speech-based":[200],"understanding":[202],"maintaining":[204],"significant":[205],"computational":[206],"efficiency.":[207]},"counts_by_year":[],"updated_date":"2026-03-07T16:01:11.037858","created_date":"2025-10-16T00:00:00"}
