<?xml version="1.0" encoding="UTF-8" ?>
<modsCollection xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://www.loc.gov/mods/v3" xmlns:slims="http://slims.web.id" xsi:schemaLocation="http://www.loc.gov/mods/v3 http://www.loc.gov/standards/mods/v3/mods-3-3.xsd">
<mods version="3.3" id="4286">
 <titleInfo>
  <title>ANALISIS SENTIMEN PADA ULASAN PENGGUNA GAME MOBILE LEGENDS DI PLAY STORE MENGGUNAKAN DEEP LEARNING TRANSFORMERS BERT</title>
 </titleInfo>
 <name type="personal" authority="">
  <namePart>Muhammad Khoiril Saipul</namePart>
  <role>
   <roleTerm type="text">Primary Author</roleTerm>
  </role>
 </name>
 <typeOfResource collection="yes">mixed material</typeOfResource>
 <genre authority="marcgt">bibliography</genre>
 <originInfo>
  <place>
   <placeTerm type="text">BEKASI</placeTerm>
  </place>
  <publisher>Universitas Satya Negara Indonesia</publisher>
  <dateIssued>2025</dateIssued>
 </originInfo>
 <language>
  <languageTerm type="code">id</languageTerm>
  <languageTerm type="text">Indonesia</languageTerm>
 </language>
 <physicalDescription>
  <form authority="gmd">Text</form>
  <extent/>
 </physicalDescription>
 <note>The game Mobile legends: Bang Bang generates millions of user reviews&#13;
containing valuable sentiment for developers, but its analysis is hindered by large data&#13;
volumes and the use of complex informal language. This research aims to design and&#13;
implement an automated sentiment analysis system using the deep learning&#13;
Transformers BERT model to classify user reviews into positive, negative, and neutral&#13;
categories. The research method includes collecting 548,250 review data from Kaggle,&#13;
text preprocessing to handle noise, and training the IndoBERT model through a finetuning&#13;
technique. The model's performance was evaluated using accuracy, precision,&#13;
recall, and F1-score metrics. The results indicate that the fine-tuned IndoBERT model&#13;
achieved an accuracy of 83.2%, a significant improvement of +62.8% compared to the&#13;
pre-trained base model, which only reached 20.4%. This success demonstrates that the&#13;
fine-tuning process effectively adapts the model to understand the unique jargon and&#13;
context of game reviews. The entire research workflow is implemented in an interactive&#13;
web application using Streamlit as a proof of concept and a tool for visualizing the&#13;
analysis results.</note>
 <note type="statement of responsibility">Muhammad Khoiril Saipul</note>
 <subject authority="">
  <topic>Teknik Informatika</topic>
 </subject>
 <subject authority="">
  <topic>Mobile Legends</topic>
 </subject>
 <subject authority="">
  <topic>Sentiment Analysis</topic>
 </subject>
 <subject authority="">
  <topic>Deep learning</topic>
 </subject>
 <subject authority="">
  <topic>Transformers BERT</topic>
 </subject>
 <subject authority="">
  <topic>IndoBERT</topic>
 </subject>
 <subject authority="">
  <topic>Natural Language Processing</topic>
 </subject>
 <classification>TI 2025</classification>
 <identifier type="isbn"/>
 <location>
  <physicalLocation>Institutional Repository USNI Universitas Satya Negara Indonesia</physicalLocation>
  <shelfLocator>TI 2025</shelfLocator>
  <holdingSimple>
   <copyInformation>
    <numerationAndChronology type="1">8250233</numerationAndChronology>
    <sublocation>Perpustakaan USNI Kampus B (SKRIPSI)</sublocation>
    <shelfLocator>TI 2025</shelfLocator>
   </copyInformation>
  </holdingSimple>
 </location>
 <slims:digitals>
  <slims:digital_item id="4498" url="" path="/11d545126deec1077df2a8b687aa7611.pdf" mimetype="application/pdf">ANALISIS SENTIMEN PADA ULASAN PENGGUNA GAME MOBILE LEGENDS DI PLAY STORE MENGGUNAKAN DEEP LEARNING TRANSFORMERS BERT</slims:digital_item>
 </slims:digitals>
 <recordInfo>
  <recordIdentifier>4286</recordIdentifier>
  <recordCreationDate encoding="w3cdtf">2025-09-17T13:40:41</recordCreationDate>
  <recordChangeDate encoding="w3cdtf">2025-09-18T14:25:37</recordChangeDate>
  <recordOrigin>machine generated</recordOrigin>
 </recordInfo>
</mods>
</modsCollection>