
Commit e8f269d

Merge pull request #31 from enesozeren/makefile_fix
makefile updated for make env
2 parents 21600e1 + 1c0450d commit e8f269d

File tree

3 files changed: +21 -40 lines changed

Makefile

+12 -15
@@ -17,33 +17,30 @@ PYTHON_INTERPRETER = python
 #################################################################################
 
 ## Set up python interpreter environment
-create_environment:
-	conda create --name $(PROJECT_NAME) python=$(PYTHON_VERSION) --no-default-packages -y
-
-## Install Python Dependencies
-requirements:
-	$(PYTHON_INTERPRETER) -m pip install -U pip setuptools wheel
-	$(PYTHON_INTERPRETER) -m pip install -r requirements.txt
-	$(PYTHON_INTERPRETER) -m pip install -e .
-
-## Install Developer Python Dependencies
-dev_requirements: requirements
-	$(PYTHON_INTERPRETER) -m pip install .["dev"]
+conda_environment:
+	conda env create --file environment.yaml && \
+	conda run -n mlops_env pip install -r requirements.txt && \
+	conda run -n mlops_env pip install -r requirements_dev.txt && \
+	echo 'Conda env ready, activate it with conda activate mlops_env'
 
 ## Delete all compiled Python files
 clean:
 	find . -type f -name "*.py[co]" -delete
 	find . -type d -name "__pycache__" -delete
 
-## Get the data
-raw_data:
+## Get the data from dvc pull
+pull_data:
+	dvc pull
+
+## Get the data from the repo with script
+get_data:
 	python $(PROJECT_NAME)/data/make_dataset.py
 
 ## Train the model
 train:
 	python $(PROJECT_NAME)/train_model.py --config=mlops_project/config/config-defaults.yaml
 
-## Run the api
+## Run the api locally
 api:
 	uvicorn --port 8000 api.main:app
 
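For orientation, a minimal usage sketch of the updated targets, run from the repository root. It assumes conda and dvc are available on PATH; everything beyond the target names and recipes shown in the hunk above is illustrative:

```bash
# One-time environment setup: creates mlops_env from environment.yaml and
# pip-installs requirements.txt plus requirements_dev.txt into it
make conda_environment
conda activate mlops_env

# Fetch data, either via DVC or via the dataset script
make pull_data   # runs `dvc pull`
make get_data    # runs $(PROJECT_NAME)/data/make_dataset.py

# Train, then serve the API locally
make train       # uses mlops_project/config/config-defaults.yaml
make api         # uvicorn --port 8000 api.main:app
```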
README.md

+6
@@ -69,6 +69,12 @@ Currently large language models has the state-of-the-art results for most NLP ta
 └── utils_functions.py
 ```
 
+## Conda environment
+To create conda environment with the requirements of this repository, simply use
+```bash
+make conda_environment
+```
+
 ## Dataset
 To get the dataset and trained model weights, use
 ```bash

environment.yaml

+3 -25
@@ -1,28 +1,6 @@
-name: mlops_base
+name: mlops_env
 channels:
   - conda-forge
 dependencies:
-  - _libgcc_mutex=0.1=conda_forge
-  - _openmp_mutex=4.5=2_gnu
-  - bzip2=1.0.8=hd590300_5
-  - ca-certificates=2024.7.4=hbcca054_0
-  - ld_impl_linux-64=2.40=hf3520f5_7
-  - libexpat=2.6.2=h59595ed_0
-  - libffi=3.4.2=h7f98852_5
-  - libgcc-ng=14.1.0=h77fa898_0
-  - libgomp=14.1.0=h77fa898_0
-  - libnsl=2.0.1=hd590300_0
-  - libsqlite=3.46.0=hde9e2c9_0
-  - libuuid=2.38.1=h0b41bf4_0
-  - libxcrypt=4.4.36=hd590300_1
-  - libzlib=1.3.1=h4ab18f5_1
-  - ncurses=6.5=h59595ed_0
-  - openssl=3.3.1=h4ab18f5_1
-  - pip=24.0=pyhd8ed1ab_0
-  - python=3.12.3=hab00c5b_0_cpython
-  - readline=8.2=h8228510_1
-  - setuptools=70.1.1=pyhd8ed1ab_0
-  - tk=8.6.13=noxft_h4845f30_101
-  - tzdata=2024a=h0c530f3_0
-  - wheel=0.43.0=pyhd8ed1ab_1
-  - xz=5.2.6=h166bdaf_0
+  - pip=24.0
+  - python=3.12.3
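As a quick sanity check after `make conda_environment`, something like the following should confirm the slimmed-down spec above resolved as expected. These are plain conda/pip commands, not part of this commit; the env name mlops_env and the python=3.12.3 pin are taken from the file above:

```bash
# The environment should exist and expose the pinned interpreter
conda env list | grep mlops_env
conda run -n mlops_env python --version   # expect Python 3.12.x

# The pip-installed project requirements should be visible inside the env
conda run -n mlops_env pip list
```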
