File tree 3 files changed +21
-40
lines changed
3 files changed +21
-40
lines changed Original file line number Diff line number Diff line change @@ -17,33 +17,30 @@ PYTHON_INTERPRETER = python
17
17
# ################################################################################
18
18
19
19
# # Set up python interpreter environment
20
- create_environment :
21
- conda create --name $(PROJECT_NAME ) python=$(PYTHON_VERSION ) --no-default-packages -y
22
-
23
- # # Install Python Dependencies
24
- requirements :
25
- $(PYTHON_INTERPRETER ) -m pip install -U pip setuptools wheel
26
- $(PYTHON_INTERPRETER ) -m pip install -r requirements.txt
27
- $(PYTHON_INTERPRETER ) -m pip install -e .
28
-
29
- # # Install Developer Python Dependencies
30
- dev_requirements : requirements
31
- $(PYTHON_INTERPRETER ) -m pip install .[" dev" ]
20
+ conda_environment :
21
+ conda env create --file environment.yaml && \
22
+ conda run -n mlops_env pip install -r requirements.txt && \
23
+ conda run -n mlops_env pip install -r requirements_dev.txt && \
24
+ echo 'Conda env ready, activate it with conda activate mlops_env'
32
25
33
26
# # Delete all compiled Python files
34
27
clean :
35
28
find . -type f -name "*.py[co]" -delete
36
29
find . -type d -name "__pycache__" -delete
37
30
38
- # # Get the data
39
- raw_data :
31
+ # # Get the data from dvc pull
32
+ pull_data :
33
+ dvc pull
34
+
35
+ # # Get the data from the repo with script
36
+ get_data :
40
37
python $(PROJECT_NAME)/data/make_dataset.py
41
38
42
39
# # Train the model
43
40
train :
44
41
python $(PROJECT_NAME)/train_model.py --config=mlops_project/config/config-defaults.yaml
45
42
46
- # # Run the api
43
+ # # Run the api locally
47
44
api :
48
45
uvicorn --port 8000 api.main:app
49
46
Original file line number Diff line number Diff line change @@ -69,6 +69,12 @@ Currently large language models has the state-of-the-art results for most NLP ta
69
69
└── utils_functions.py
70
70
```
71
71
72
+ ## Conda environment
73
+ To create conda environment with the requirements of this repository, simply use
74
```bash
75
+ make conda_environment
76
+ ```
77
+
72
78
## Dataset
73
79
To get the dataset and trained model weights, use
74
80
```bash
Original file line number Diff line number Diff line change 1
- name : mlops_base
1
+ name : mlops_env
2
2
channels :
3
3
- conda-forge
4
4
dependencies :
5
- - _libgcc_mutex=0.1=conda_forge
6
- - _openmp_mutex=4.5=2_gnu
7
- - bzip2=1.0.8=hd590300_5
8
- - ca-certificates=2024.7.4=hbcca054_0
9
- - ld_impl_linux-64=2.40=hf3520f5_7
10
- - libexpat=2.6.2=h59595ed_0
11
- - libffi=3.4.2=h7f98852_5
12
- - libgcc-ng=14.1.0=h77fa898_0
13
- - libgomp=14.1.0=h77fa898_0
14
- - libnsl=2.0.1=hd590300_0
15
- - libsqlite=3.46.0=hde9e2c9_0
16
- - libuuid=2.38.1=h0b41bf4_0
17
- - libxcrypt=4.4.36=hd590300_1
18
- - libzlib=1.3.1=h4ab18f5_1
19
- - ncurses=6.5=h59595ed_0
20
- - openssl=3.3.1=h4ab18f5_1
21
- - pip=24.0=pyhd8ed1ab_0
22
- - python=3.12.3=hab00c5b_0_cpython
23
- - readline=8.2=h8228510_1
24
- - setuptools=70.1.1=pyhd8ed1ab_0
25
- - tk=8.6.13=noxft_h4845f30_101
26
- - tzdata=2024a=h0c530f3_0
27
- - wheel=0.43.0=pyhd8ed1ab_1
28
- - xz=5.2.6=h166bdaf_0
5
+ - pip=24.0
6
+ - python=3.12.3
You can’t perform that action at this time.
0 commit comments