
Commit 8143446

Merge pull request #39 from facebookresearch/main
Latest sync
2 parents: 3485946 + b400118

35 files changed, +3636 -141 lines

.circleci/config.yml  (+4 -49)

@@ -17,47 +17,6 @@ gpu: &gpu
 # -------------------------------------------------------------------------------------
 cache_key: &cache_key cache-key-{{ .Environment.CIRCLE_JOB }}-{{ checksum ".circleci/config.yml" }}-{{ checksum "setup.py"}}
 
-install_dep_common: &install_dep_common
-  - run:
-      name: Install Common Dependencies
-      command: |
-        source activate fairseq
-        pip install --upgrade setuptools
-        pip install bitarray boto3 deepspeed editdistance fastBPE iopath ipdb ipython pyarrow pytest sacremoses sentencepiece subword-nmt hydra-core==1.0.7 omegaconf==2.0.6
-        pip install --progress-bar off pytest
-        pip install --progress-bar off fairscale
-        pip install -i https://test.pypi.org/simple/ bitsandbytes-cuda111 -U
-        python -c 'import torch; print("Torch version:", torch.__version__)'
-        python -m torch.utils.collect_env
-
-install_dep_fused_ops: &install_dep_fused_ops
-  # this version of Apex is from Feb 2021 and doesn't work with torch>=1.12
-  - run:
-      name: Install Megatron/Apex Dependencies
-      working_directory: ~/
-      command: |
-        source activate fairseq
-        git clone https://github.com/NVIDIA/apex
-        cd apex
-        git checkout e2083df5eb96643c61613b9df48dd4eea6b07690
-        sed -i '101,107 s/^/#/' setup.py
-        pip install -v --no-cache-dir --global-option="--cpp_ext" --global-option="--cuda_ext" --global-option="--deprecated_fused_adam" --global-option="--xentropy" --global-option="--fast_multihead_attn" ./
-        cd ~/
-        git clone --depth=1 --branch v2.4 https://github.com/NVIDIA/Megatron-LM.git
-        cd Megatron-LM
-        pip install -e .
-
-install_dep_xformers: &install_dep_xformers
-  - run:
-      name: Install xFormers Dependencies
-      working_directory: ~/
-      command: |
-        source activate fairseq
-        git clone https://github.com/facebookresearch/xformers.git
-        cd xformers
-        pip install -r requirements.txt
-        pip install -e .
-
 install_dep_pt1_10: &install_dep_pt1_10
   - run:
       name: Install Pytorch Dependencies
@@ -81,8 +40,9 @@ install_repo: &install_repo
       name: Install Repository
       command: |
         source activate fairseq
-        pip install .
-        python setup.py build_ext --inplace
+        python -m pip install fairscale
+        python -m pip install -e '.[dev,docs]'
+        python -c 'import torch; print("Torch version:", torch.__version__)'
 
 run_unittests: &run_unittests
   - run:
@@ -134,8 +94,6 @@ jobs:
       - restore_cache:
           key: *cache_key
       - <<: *install_dep_pt1_10
-      - <<: *install_dep_common
-      - <<: *install_dep_fused_ops
       - save_cache:
          paths:
            - ~/miniconda/
@@ -155,8 +113,6 @@ jobs:
       - restore_cache:
           key: *cache_key
       - <<: *install_dep_pt1_12
-      - <<: *install_dep_common
-      - <<: *install_dep_fused_ops
      - save_cache:
          paths:
            - ~/miniconda/
@@ -168,6 +124,5 @@ workflows:
   version: 2
   build:
     jobs:
-      # TODO: Figure out how to run APEX on torch 1.12
-      # - gpu_tests_pt1_12
+      - gpu_tests_pt1_12
       - gpu_tests_pt1_10

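The rewritten install step replaces the long, hand-maintained pip list from the removed install_dep_common anchor with an editable install that pulls its dependencies from the package's own dev and docs extras. As a rough illustration only (the package name and dependency lists below are hypothetical placeholders, not the repository's actual setup.py), `pip install -e '.[dev,docs]'` resolves against an extras_require mapping of this shape:

# Hypothetical setup.py sketch of the extras that `pip install -e '.[dev,docs]'`
# would install; package and dependency names here are illustrative placeholders.
from setuptools import find_packages, setup

setup(
    name="example_package",  # placeholder, not the real project name
    packages=find_packages(),
    install_requires=["torch", "omegaconf", "hydra-core"],  # core runtime deps
    extras_require={
        "dev": ["pytest", "flake8", "black"],        # pulled in by the [dev] extra
        "docs": ["sphinx", "sphinx_rtd_theme"],      # pulled in by the [docs] extra
    },
)
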
examples/speech_to_speech/__init__.py  (+2)

@@ -2,3 +2,5 @@
 #
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.
+
+from . import unity  # noqa

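The added line follows the usual pattern of importing a subpackage purely for its side effects: loading examples.speech_to_speech now also loads the new unity subpackage, and the # noqa comment keeps linters from flagging the seemingly unused import. A minimal sketch of the effect, assuming the examples tree is importable under this dotted path (an assumption; the diff only shows relative imports):

# Sketch: after this change, importing the parent package also loads `unity`.
# The dotted path is inferred from the file layout and may differ per checkout.
import examples.speech_to_speech as s2s

print(s2s.unity)  # the subpackage is already present as an attribute of the parent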

New file (path not shown)  (+7)

@@ -0,0 +1,7 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from . import sequence_generator  # noqa
+from . import sequence_generator_multi_decoder  # noqa
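The new __init__ makes both generator variants importable as soon as the subpackage loads. Purely as an illustration (the dotted path is inferred from the surrounding diff, and the selection flag is made up, not something this commit defines), a caller could dispatch between the two modules like this:

# Illustrative only: choose one of the two generator modules exposed above.
# The import path is an inference from this diff; `multi_decoder` is a made-up flag.
from examples.speech_to_speech.unity import (
    sequence_generator,
    sequence_generator_multi_decoder,
)

def pick_generator_module(multi_decoder: bool):
    """Return the module whose generator class the caller should instantiate."""
    return sequence_generator_multi_decoder if multi_decoder else sequence_generator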
