Skip to content

Commit 2bd87f6

Browse files
adrinjalali authored and ogrisel committed
Remove python < 3.5 from CI (scikit-learn#12746)
1 parent 4603e48 commit 2bd87f6

39 files changed

+108
-1221
lines changed

.circleci/config.yml

+18-18
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,18 @@
11
version: 2
22

33
jobs:
4-
python3:
4+
doc-min-dependencies:
55
docker:
66
- image: circleci/python:3.6.1
77
environment:
88
- MINICONDA_PATH: ~/miniconda
99
- CONDA_ENV_NAME: testenv
10-
- PYTHON_VERSION: 3
10+
- PYTHON_VERSION: 3.5
11+
- NUMPY_VERSION: 1.11.0
12+
- SCIPY_VERSION: 0.17.0
13+
- PANDAS_VERSION: 0.18.0
14+
- MATPLOTLIB_VERSION: 1.5.1
15+
- SCIKIT_IMAGE_VERSION: 0.12.3
1116
steps:
1217
- checkout
1318
- run: ./build_tools/circle/checkout_merge_commit.sh
@@ -30,31 +35,21 @@ jobs:
3035
root: doc/_build/html
3136
paths: .
3237

33-
34-
python2:
38+
doc:
3539
docker:
36-
# We use the python 3 docker image for simplicity. Python is installed
37-
# through conda and the python version actually used is set via the
38-
# PYTHON_VERSION environment variable.
3940
- image: circleci/python:3.6.1
4041
environment:
41-
# Test examples run with minimal dependencies
4242
- MINICONDA_PATH: ~/miniconda
4343
- CONDA_ENV_NAME: testenv
44-
- PYTHON_VERSION: "2"
45-
- NUMPY_VERSION: "1.10"
46-
- SCIPY_VERSION: "0.16"
47-
- MATPLOTLIB_VERSION: "1.4"
48-
- SCIKIT_IMAGE_VERSION: "0.11"
49-
- PANDAS_VERSION: "0.17.1"
44+
- PYTHON_VERSION: 3
5045
steps:
5146
- checkout
5247
- run: ./build_tools/circle/checkout_merge_commit.sh
5348
- restore_cache:
54-
key: v1-datasets-{{ .Branch }}-python2
49+
key: v1-datasets-{{ .Branch }}
5550
- run: ./build_tools/circle/build_doc.sh
5651
- save_cache:
57-
key: v1-datasets-{{ .Branch }}-python2
52+
key: v1-datasets-{{ .Branch }}
5853
paths:
5954
- ~/scikit_learn_data
6055
- store_artifacts:
@@ -63,6 +58,11 @@ jobs:
6358
- store_artifacts:
6459
path: ~/log.txt
6560
destination: log.txt
61+
# Persists generated documentation so that it can be attached and deployed
62+
# in the 'deploy' step.
63+
- persist_to_workspace:
64+
root: doc/_build/html
65+
paths: .
6666

6767
lint:
6868
docker:
@@ -114,8 +114,8 @@ workflows:
114114
version: 2
115115
build-doc-and-deploy:
116116
jobs:
117-
- python3
118-
- python2
117+
- doc
118+
- doc-min-dependencies
119119
- lint
120120
- pypy3:
121121
filters:

.travis.yml

+14-11
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ cache:
99
- $HOME/.cache/pip
1010
- $HOME/.ccache
1111

12-
dist: trusty
12+
dist: xenial
1313

1414
env:
1515
global:
@@ -21,26 +21,29 @@ env:
2121
matrix:
2222
include:
2323
# This environment tests that scikit-learn can be built against
24-
# versions of numpy, scipy with ATLAS that comes with Ubuntu Trusty 14.04
25-
# i.e. numpy 1.8.2 and scipy 0.13.3
26-
- env: DISTRIB="ubuntu" PYTHON_VERSION="2.7" CYTHON_VERSION="0.23.5"
27-
COVERAGE=true
24+
# versions of numpy, scipy with ATLAS that comes with Ubuntu Xenial 16.04
25+
# i.e. numpy 1.11 and scipy 0.17
26+
- env: DISTRIB="ubuntu" PYTHON_VERSION="3.5" CYTHON_VERSION="0.28.6"
27+
NUMPY_VERSION="1.11.0" SCIPY_VERSION="0.17.0"
28+
PILLOW_VERSION="4.0.0" COVERAGE=true
29+
SKLEARN_SITE_JOBLIB=1 JOBLIB_VERSION="0.11"
2830
if: type != cron
2931
addons:
3032
apt:
3133
packages:
3234
# these only required by the DISTRIB="ubuntu" builds:
33-
- python-scipy
35+
- python3-scipy
3436
- libatlas3-base
37+
- libatlas-base-dev
3538
- libatlas-dev
36-
# Python 3.4 build
37-
- env: DISTRIB="conda" PYTHON_VERSION="3.4" INSTALL_MKL="false"
38-
NUMPY_VERSION="1.10.4" SCIPY_VERSION="0.16.1" CYTHON_VERSION="0.25.2"
39+
# Python 3.5 build without SITE_JOBLIB
40+
- env: DISTRIB="conda" PYTHON_VERSION="3.5" INSTALL_MKL="false"
41+
NUMPY_VERSION="1.11.0" SCIPY_VERSION="0.17.0" CYTHON_VERSION="0.25.2"
3942
PILLOW_VERSION="4.0.0" COVERAGE=true
4043
if: type != cron
4144
# Python 3.5 build
4245
- env: DISTRIB="conda" PYTHON_VERSION="3.5" INSTALL_MKL="false"
43-
NUMPY_VERSION="1.10.4" SCIPY_VERSION="0.16.1" CYTHON_VERSION="0.25.2"
46+
NUMPY_VERSION="1.11.0" SCIPY_VERSION="0.17.0" CYTHON_VERSION="0.25.2"
4447
PILLOW_VERSION="4.0.0" COVERAGE=true
4548
SKLEARN_SITE_JOBLIB=1 JOBLIB_VERSION="0.11"
4649
if: type != cron
@@ -57,7 +60,7 @@ matrix:
5760
# This environment tests scikit-learn against numpy and scipy master
5861
# installed from their CI wheels in a virtualenv with the Python
5962
# interpreter provided by travis.
60-
- python: 3.6
63+
- python: 3.7
6164
env: DISTRIB="scipy-dev" CHECK_WARNINGS="true"
6265
if: type = cron OR commit_message =~ /\[scipy-dev\]/
6366

appveyor.yml

+2-3
Original file line numberDiff line numberDiff line change
@@ -22,11 +22,10 @@ environment:
2222
PYTHON_ARCH: "64"
2323
CHECK_WARNINGS: "true"
2424

25-
- PYTHON: "C:\\Python27"
26-
PYTHON_VERSION: "2.7.8"
25+
- PYTHON: "C:\\Python35"
26+
PYTHON_VERSION: "3.5.6"
2727
PYTHON_ARCH: "32"
2828

29-
3029
# Because we only have a single worker, we don't want to waste precious
3130
# appveyor CI time and make other PRs wait for repeated failures in a failing
3231
# PR. The following option cancels pending jobs in a given PR after the first

build_tools/travis/install.sh

+2-7
Original file line numberDiff line numberDiff line change
@@ -70,21 +70,16 @@ if [[ "$DISTRIB" == "conda" ]]; then
7070
fi
7171
make_conda $TO_INSTALL
7272

73-
# for python 3.4, conda does not have recent pytest packages
74-
if [[ "$PYTHON_VERSION" == "3.4" ]]; then
75-
pip install pytest==3.5
76-
fi
77-
7873
elif [[ "$DISTRIB" == "ubuntu" ]]; then
7974
# At the time of writing numpy 1.9.1 is included in the travis
8075
# virtualenv but we want to use the numpy installed through apt-get
8176
# install.
8277
deactivate
8378
# Create a new virtualenv using system site packages for python, numpy
8479
# and scipy
85-
virtualenv --system-site-packages testvenv
80+
virtualenv --system-site-packages --python=python3 testvenv
8681
source testvenv/bin/activate
87-
pip install pytest pytest-cov cython==$CYTHON_VERSION
82+
pip install pytest pytest-cov cython==$CYTHON_VERSION joblib==$JOBLIB_VERSION
8883

8984
elif [[ "$DISTRIB" == "scipy-dev" ]]; then
9085
make_conda python=3.7

conftest.py

+1-6
Original file line numberDiff line numberDiff line change
@@ -11,8 +11,6 @@
1111
import pytest
1212
from _pytest.doctest import DoctestItem
1313

14-
from sklearn.utils.fixes import PY3_OR_LATER
15-
1614
PYTEST_MIN_VERSION = '3.3.0'
1715

1816
if LooseVersion(pytest.__version__) < PYTEST_MIN_VERSION:
@@ -47,11 +45,8 @@ def pytest_collection_modifyitems(config, items):
4745
item.add_marker(skip_network)
4846

4947
# numpy changed the str/repr formatting of numpy arrays in 1.14. We want to
50-
# run doctests only for numpy >= 1.14. We want to skip the doctest for
51-
# python 2 due to unicode.
48+
# run doctests only for numpy >= 1.14.
5249
skip_doctests = False
53-
if not PY3_OR_LATER:
54-
skip_doctests = True
5550
try:
5651
import numpy as np
5752
if LooseVersion(np.__version__) < LooseVersion('1.14'):

examples/model_selection/plot_precision_recall.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -137,7 +137,7 @@
137137
# ................................
138138
from sklearn.metrics import precision_recall_curve
139139
import matplotlib.pyplot as plt
140-
from sklearn.utils.fixes import signature
140+
from inspect import signature
141141

142142
precision, recall, _ = precision_recall_curve(y_test, y_score)
143143

setup.py

+3-10
Original file line numberDiff line numberDiff line change
@@ -11,11 +11,7 @@
1111
from distutils.command.clean import clean as Clean
1212
from pkg_resources import parse_version
1313
import traceback
14-
15-
if sys.version_info[0] < 3:
16-
import __builtin__ as builtins
17-
else:
18-
import builtins
14+
import builtins
1915

2016
# This is a bit (!) hackish: we are setting a global variable so that the main
2117
# sklearn __init__ can detect if it is being loaded by the setup routine, to
@@ -45,8 +41,8 @@
4541
SCIPY_MIN_VERSION = '1.1.0'
4642
NUMPY_MIN_VERSION = '1.14.0'
4743
else:
48-
SCIPY_MIN_VERSION = '0.13.3'
49-
NUMPY_MIN_VERSION = '1.8.2'
44+
SCIPY_MIN_VERSION = '0.17.0'
45+
NUMPY_MIN_VERSION = '1.11.0'
5046

5147

5248
# Optional setuptools features
@@ -183,10 +179,7 @@ def setup_package():
183179
'Operating System :: POSIX',
184180
'Operating System :: Unix',
185181
'Operating System :: MacOS',
186-
'Programming Language :: Python :: 2',
187-
'Programming Language :: Python :: 2.7',
188182
'Programming Language :: Python :: 3',
189-
'Programming Language :: Python :: 3.4',
190183
'Programming Language :: Python :: 3.5',
191184
'Programming Language :: Python :: 3.6',
192185
'Programming Language :: Python :: 3.7',

sklearn/base.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -6,11 +6,11 @@
66
import copy
77
import warnings
88
from collections import defaultdict
9+
from inspect import signature
910

1011
import numpy as np
1112
from scipy import sparse
1213
from .externals import six
13-
from .utils.fixes import signature
1414
from . import __version__
1515

1616

sklearn/calibration.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99

1010
from __future__ import division
1111
import warnings
12+
from inspect import signature
1213

1314
from math import log
1415
import numpy as np
@@ -20,7 +21,6 @@
2021
from .preprocessing import label_binarize, LabelBinarizer
2122
from .utils import check_X_y, check_array, indexable, column_or_1d
2223
from .utils.validation import check_is_fitted, check_consistent_length
23-
from .utils.fixes import signature
2424
from .isotonic import IsotonicRegression
2525
from .svm import LinearSVC
2626
from .model_selection import check_cv

sklearn/cluster/tests/test_k_means.py

-4
Original file line numberDiff line numberDiff line change
@@ -237,10 +237,6 @@ def test_k_means_new_centers():
237237

238238
@if_safe_multiprocessing_with_blas
239239
def test_k_means_plus_plus_init_2_jobs():
240-
if sys.version_info[:2] < (3, 4):
241-
raise SkipTest(
242-
"Possible multi-process bug with some BLAS under Python < 3.4")
243-
244240
km = KMeans(init="k-means++", n_clusters=n_clusters, n_jobs=2,
245241
random_state=42).fit(X)
246242
_check_fitted_model(km)

sklearn/datasets/covtype.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717
from gzip import GzipFile
1818
import logging
1919
from os.path import dirname, exists, join
20-
from os import remove
20+
from os import remove, makedirs
2121

2222
import numpy as np
2323

@@ -26,7 +26,6 @@
2626
from .base import RemoteFileMetadata
2727
from ..utils import Bunch
2828
from .base import _pkl_filepath
29-
from ..utils.fixes import makedirs
3029
from ..utils import _joblib
3130
from ..utils import check_random_state
3231

sklearn/datasets/kddcup99.py

+1-8
Original file line numberDiff line numberDiff line change
@@ -205,14 +205,7 @@ def _fetch_brute_kddcup99(data_home=None,
205205
"""
206206

207207
data_home = get_data_home(data_home=data_home)
208-
if sys.version_info[0] == 3:
209-
# The zlib compression format use by joblib is not compatible when
210-
# switching from Python 2 to Python 3, let us use a separate folder
211-
# under Python 3:
212-
dir_suffix = "-py3"
213-
else:
214-
# Backward compat for Python 2 users
215-
dir_suffix = ""
208+
dir_suffix = "-py3"
216209

217210
if percent10:
218211
kddcup_dir = join(data_home, "kddcup99_10" + dir_suffix)

sklearn/datasets/mldata.py

+3-10
Original file line numberDiff line numberDiff line change
@@ -7,16 +7,9 @@
77
from os.path import join, exists
88
import re
99
import numbers
10-
try:
11-
# Python 2
12-
from urllib2 import HTTPError
13-
from urllib2 import quote
14-
from urllib2 import urlopen
15-
except ImportError:
16-
# Python 3+
17-
from urllib.error import HTTPError
18-
from urllib.parse import quote
19-
from urllib.request import urlopen
10+
from urllib.error import HTTPError
11+
from urllib.parse import quote
12+
from urllib.request import urlopen
2013

2114
import numpy as np
2215
import scipy as sp

sklearn/datasets/openml.py

+5-20
Original file line numberDiff line numberDiff line change
@@ -7,20 +7,14 @@
77
from contextlib import closing
88
from functools import wraps
99

10-
try:
11-
# Python 3+
12-
from urllib.request import urlopen, Request
13-
except ImportError:
14-
# Python 2
15-
from urllib2 import urlopen, Request
16-
10+
from urllib.request import urlopen, Request
1711

1812
import numpy as np
1913
import scipy.sparse
2014

2115
from sklearn.externals import _arff
2216
from .base import get_data_home
23-
from ..externals.six import string_types, PY2, BytesIO
17+
from ..externals.six import string_types
2418
from ..externals.six.moves.urllib.error import HTTPError
2519
from ..utils import Bunch
2620

@@ -89,8 +83,6 @@ def is_gzip(_fsrc):
8983
if data_home is None:
9084
fsrc = urlopen(req)
9185
if is_gzip(fsrc):
92-
if PY2:
93-
fsrc = BytesIO(fsrc.read())
9486
return gzip.GzipFile(fileobj=fsrc, mode='rb')
9587
return fsrc
9688

@@ -357,16 +349,9 @@ def _arff_load():
357349
else:
358350
return_type = _arff.DENSE
359351

360-
if PY2:
361-
arff_file = _arff.load(
362-
response.read(),
363-
encode_nominal=encode_nominal,
364-
return_type=return_type,
365-
)
366-
else:
367-
arff_file = _arff.loads(response.read().decode('utf-8'),
368-
encode_nominal=encode_nominal,
369-
return_type=return_type)
352+
arff_file = _arff.loads(response.read().decode('utf-8'),
353+
encode_nominal=encode_nominal,
354+
return_type=return_type)
370355
return arff_file
371356

372357
return _arff_load()

0 commit comments

Comments (0)