
[AI] Environment Setup @Ubuntu


2025.04.09

# Install Ollama (local LLM runtime)
curl -fsSL https://ollama.com/install.sh | sh
sudo apt install vim
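
A quick smoke test of the Ollama install above; the model name is just an example, and any model from the Ollama library can be substituted.

ollama --version
ollama pull llama3
ollama run llama3 "Say hello in one sentence."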

# Create a Python virtual environment and install Open WebUI
sudo apt install python3-pip
sudo apt install python3.12-venv
python3 -m venv venv_openwebui
source venv_openwebui/bin/activate
pip install open-webui
open-webui serve
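
Assuming Open WebUI's default settings, the web UI should now be reachable on port 8080; a quick check from a second terminal:

curl -I http://localhost:8080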

# Log in to Hugging Face and download the Llama 3 8B weights
source venv_openwebui/bin/activate
pip install huggingface_hub
huggingface-cli login
huggingface-cli whoami
huggingface-cli download meta-llama/Meta-Llama-3-8B --local-dir ./llama3.1-8B --local-dir-use-symlinks False
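
A minimal sketch to confirm that the downloaded checkpoint loads; it assumes the transformers/accelerate/torch stack installed in the conda environment further below, and reuses the ./llama3.1-8B directory from the download command above.

python - <<'PY'
# Load the locally downloaded Llama 3 8B checkpoint and generate a few tokens.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_dir = "./llama3.1-8B"  # local directory used in the huggingface-cli download above
tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForCausalLM.from_pretrained(
    model_dir,
    torch_dtype=torch.float16,  # half precision so an 8B model fits on a single GPU
    device_map="auto",          # requires the accelerate package
)
inputs = tokenizer("The capital of France is", return_tensors="pt").to(model.device)
out = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(out[0], skip_special_tokens=True))
PY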

# nvtop: interactive GPU monitor
sudo apt install nvtop

# CUDA toolkit (provides nvcc)
sudo apt install nvidia-cuda-toolkit
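
To confirm the toolkit is on the PATH:

nvcc --version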

sudo apt install vim-gtk3
########################################################

# 1. Download the Miniconda installer script
wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh
# 2. Make the installer executable
chmod +x Miniconda3-latest-Linux-x86_64.sh
# 3. Run the installer
./Miniconda3-latest-Linux-x86_64.sh
echo "source ~/miniconda3/etc/profile.d/conda.sh" >> ~/.bashrc
source ~/.bashrc
conda --version
conda create -n llama python=3.10 -y
conda activate llama

# Install PyTorch (CUDA 12.1 build; adjust to match your CUDA version)
conda install pytorch torchvision torchaudio pytorch-cuda=12.1 -c pytorch -c nvidia
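# (optional) quick check of which CUDA build conda actually resolved
python -c "import torch; print(torch.__version__, torch.version.cuda)"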
# Hugging Face related packages
pip install transformers datasets accelerate peft bitsandbytes sentencepiece scipy
python -c "import torch; print(torch.cuda.is_available())"
python -c "import transformers; print(transformers.__version__)"