Wav2Vec2 ASR Fine-tuning Improvement

I am fine-tuning a model for my language, which is written with the English alphabet.
Below are my settings. Can anyone help me tweak them so that I can get better results?

Number of examples in training set: 3739
Number of examples in testing set: 661

from transformers import Wav2Vec2ForCTC

# Load the 300M-parameter multilingual XLS-R checkpoint with a fresh CTC head
# sized to this language's tokenizer vocabulary.
# NOTE: `gradient_checkpointing` was removed here — it is a training-time
# setting, not a model-config field (passing it to `from_pretrained` is
# deprecated and errors on recent transformers versions). It is already
# enabled via `TrainingArguments(gradient_checkpointing=True)` below.
model = Wav2Vec2ForCTC.from_pretrained(
    "facebook/wav2vec2-xls-r-300m",
    attention_dropout=0.1,          # dropout on attention probabilities
    hidden_dropout=0.1,             # dropout on hidden states
    feat_proj_dropout=0.0,          # no dropout on the feature projection
    mask_time_prob=0.05,            # SpecAugment-style time masking probability
    layerdrop=0.1,                  # randomly drop transformer layers during training
    ctc_loss_reduction="mean",      # average CTC loss over the batch
    ctc_zero_infinity=True,         # zero out infinite losses from too-short inputs
    pad_token_id=processor.tokenizer.pad_token_id,
    vocab_size=len(processor.tokenizer),  # CTC head output size = tokenizer vocab
)

from transformers import TrainingArguments

# Training configuration. Effective batch size = 8 * 8 (grad accumulation) = 64.
training_args = TrainingArguments(
    output_dir="location",              # fixed typo: was "loaction"
    group_by_length=True,               # bucket samples of similar length to cut padding
    per_device_train_batch_size=8,
    gradient_accumulation_steps=8,
    evaluation_strategy="steps",
    num_train_epochs=200,
    gradient_checkpointing=True,        # trade compute for memory
    fp16=True,                          # mixed-precision training
    save_steps=200,
    eval_steps=200,
    # Log at the same cadence as evaluation; with logging_steps=400 every
    # other eval row reported "No log" for the training loss.
    logging_steps=200,
    learning_rate=3e-4,
    warmup_steps=500,                   # linear LR warmup before decay
    save_total_limit=2,                 # keep only the 2 most recent checkpoints
    push_to_hub=False,
)

[11600/11600 11:26:15, Epoch 198/200]
Step	Training Loss	Validation Loss	Wer
200	No log	2.882267	1.000000
400	3.347200	0.814857	0.646144
600	3.347200	0.443969	0.395896
800	0.652700	0.366413	0.330348
1000	0.652700	0.365178	0.291915
1200	0.345800	0.368005	0.279726
1400	0.345800	0.363631	0.268532
1600	0.236300	0.387128	0.255721
1800	0.236300	0.417593	0.259577
2000	0.177600	0.423806	0.250622
2200	0.177600	0.421441	0.233085
2400	0.147700	0.449364	0.250373
2600	0.147700	0.436085	0.232090
2800	0.122000	0.467937	0.229726
3000	0.122000	0.487380	0.231468
3200	0.107200	0.481970	0.230100
3400	0.107200	0.478980	0.231716
3600	0.092800	0.509161	0.223756
3800	0.092800	0.507937	0.222637
4000	0.079900	0.518146	0.223756
4200	0.079900	0.509740	0.219154
4400	0.076300	0.492435	0.214925
4600	0.076300	0.533796	0.212687
4800	0.065800	0.520204	0.212935
5000	0.065800	0.552162	0.211940
5200	0.060500	0.531908	0.214303
5400	0.060500	0.536912	0.213433
5600	0.055100	0.553004	0.214677
5800	0.055100	0.546174	0.215050
6000	0.050300	0.564660	0.210448
6200	0.050300	0.568010	0.209826
6400	0.048000	0.579118	0.209204
6600	0.048000	0.579771	0.204602
6800	0.042100	0.570514	0.214677
7000	0.042100	0.588499	0.204851
7200	0.039100	0.569614	0.204104
7400	0.039100	0.578806	0.205721
7600	0.036300	0.568099	0.199876
7800	0.036300	0.572734	0.204229
8000	0.033700	0.575987	0.200995
8200	0.033700	0.617643	0.200746
8400	0.032700	0.596163	0.200871
8600	0.032700	0.623386	0.202239
8800	0.027700	0.598604	0.198632
9000	0.027700	0.595945	0.200746
9200	0.026800	0.608260	0.200746
9400	0.026800	0.610333	0.198756
9600	0.024800	0.616911	0.195025
9800	0.024800	0.618447	0.197264
10000	0.023300	0.620055	0.198134
10200	0.023300	0.627929	0.195522
10400	0.022100	0.628579	0.195896
10600	0.022100	0.634180	0.195647
10800	0.020600	0.629673	0.193657
11000	0.020600	0.630992	0.193159
11200	0.019100	0.636925	0.195274
11400	0.019100	0.641024	0.195025
11600	0.017900	0.643567	0.194403