Inspired by the Stanford CS224n course project and several open-source implementations on GitHub, this project implements GPT-2 in PyTorch and fine-tunes it on downstream tasks.
# Create virtual environment
python -m venv .venv
# Activate virtual environment — on Windows (cmd/PowerShell)
.venv/Scripts/activate
# or on Linux
source .venv/bin/activate
# Install requirements
pip install -r requirements.txt
# Set python path — on Windows (PowerShell syntax)
$env:PYTHONPATH="src"
# or on Linux
export PYTHONPATH="src"
# Sentiment Analysis
# Subcommands: train, evaluate, infer. Run with --help to list the
# available arguments for each subcommand.
python -m tasks.sentiment train [--help] [--arguments]
python -m tasks.sentiment evaluate [--help] [--arguments]
python -m tasks.sentiment infer [--help] [--arguments]
# Paraphrase Detection
# Subcommands: train, evaluate, infer. Run with --help to list the
# available arguments for each subcommand.
python -m tasks.paraphrase train [--help] [--arguments]
python -m tasks.paraphrase evaluate [--help] [--arguments]
python -m tasks.paraphrase infer [--help] [--arguments]
# Sonnet Generation
# Subcommands: train, evaluate, generate (text generation rather than
# inference). Run with --help to list the available arguments.
python -m tasks.sonnet train [--help] [--arguments]
python -m tasks.sonnet evaluate [--help] [--arguments]
python -m tasks.sonnet generate [--help] [--arguments]