Python 3.6 #606

Open · wants to merge 11 commits into base: master
40 changes: 29 additions & 11 deletions .travis.yml
@@ -1,36 +1,36 @@
sudo: False
sudo: required

language: python
language: generic

matrix:
fast_finish: true
include:
- python: 2.7
env:
- python: 3.4
env:
- python: 3.5
env:
- python: 3.6
env:
- python: 2.7
env: PANDAS_VERSION="git+https://github.com/pydata/pandas"
- python: 3.4
env: PANDAS_VERSION="git+https://github.com/pydata/pandas"
- python: 3.5
env: PANDAS_VERSION="git+https://github.com/pydata/pandas"
- python: 3.6
env: PANDAS_VERSION="git+https://github.com/pydata/pandas"
allow_failures:
- python: 2.7
env: PANDAS_VERSION="git+https://github.com/pydata/pandas"
- python: 3.4
env: PANDAS_VERSION="git+https://github.com/pydata/pandas"
- python: 3.5
env: PANDAS_VERSION="git+https://github.com/pydata/pandas"
- python: 3.6
env: PANDAS_VERSION="git+https://github.com/pydata/pandas"

addons:
postgresql: "9.3"
postgresql: "9.5"

services:
- mongodb
- mysql
- mongodb
- mysql

install:
# Install conda
@@ -40,6 +40,24 @@ install:
- conda config --set always_yes yes --set changeps1 no
- conda update conda

# Install dependencies
# Use conda **ONLY** for numpy and pandas (if not pulling from master), this
# speeds up the builds a lot. Use the normal pip install for the rest.
- conda create -n odo numpy=1.11.2
- source activate odo
# update setuptools and pip
- conda update setuptools pip
- if [ -n "$PANDAS_VERSION" ]; then pip install cython==0.27.3; pip install $PANDAS_VERSION; else conda install pandas=0.22.0; fi

# install the frozen ci dependencies
- pip install -e .[ci]

# datashape
- pip install git+git://github.com/blaze/datashape.git

# redshift sqlalchemy dialect
- pip install --upgrade git+git://github.com/graingert/redshift_sqlalchemy

# install python stuff
- source etc/ci-install.sh

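Note on the pip install -e .[ci] step above: the [ci] suffix refers to a setuptools extras group, so the frozen pins kept in etc/requirements_ci.txt have to be exposed from setup.py for that command to resolve anything. A minimal sketch of such wiring, assuming a helper like the one below (the helper name and file layout are illustrative, not code from this PR):

import os
from setuptools import setup, find_packages

def read_requirements(path):
    # One requirement per line, e.g. "dask==0.16.1"; blank lines and comments are skipped.
    with open(path) as f:
        return [line.strip() for line in f
                if line.strip() and not line.startswith('#')]

setup(
    name='odo',
    packages=find_packages(),
    extras_require={
        # "pip install -e .[ci]" resolves this key.
        'ci': read_requirements(os.path.join('etc', 'requirements_ci.txt')),
    },
)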
10 changes: 5 additions & 5 deletions conda.recipe/meta.yaml
@@ -8,7 +8,7 @@ build:
{% else %}string: py{{ environ.get('PY_VER').replace('.', '') }}_{{ environ.get('GIT_BUILD_STR', 'GIT_STUB') }}{% endif %}

source:
git_url: ../
path: ../

requirements:
build:
@@ -18,7 +18,7 @@ requirements:
- pandas >=0.15.0
- toolz >=0.7.3
- multipledispatch >=0.4.7
- networkx
- networkx >1.0,<2.0
- dask >=0.11.1

run:
@@ -28,7 +28,7 @@ requirements:
- pandas >=0.15.0
- toolz >=0.7.3
- multipledispatch >=0.4.7
- networkx
- networkx >1.0,<2.0
- dask >=0.11.1

test:
@@ -38,8 +38,8 @@ test:
- pytables >=3.0.0
- sqlalchemy >=0.8.0
- bcolz
- pymongo # [not (py35 or win)]
- mock
- pymongo >=2.8,<3 # [not (py35 or win)]
- dask
- paramiko # [not win]
- sqlite3 # [win]
@@ -50,7 +50,7 @@ test:
- psycopg2 # [unix]

commands:
- pip install sas7bdat && py.test -vx --pyargs odo -rsxX
- pip install sas7bdat && py.test -v --pyargs odo -rsxX

about:
home: https://github.com/blaze/odo/
2 changes: 1 addition & 1 deletion etc/ci-install.sh
@@ -10,7 +10,7 @@ if [ -n "$PANDAS_VERSION" ];then
pip install cython==0.24.1
pip install $PANDAS_VERSION
else
conda install pandas=0.19.0
conda install pandas=0.22.1
fi

conda install pytables=3.3.0
8 changes: 4 additions & 4 deletions etc/requirements_ci.txt
@@ -6,7 +6,7 @@ cffi==1.8.3
click==6.6
cloudpickle==0.2.1
cryptography==1.5.2
dask==0.11.1
dask==0.16.1
decorator==4.0.10
distributed==1.13.3
docutils==0.12
@@ -27,12 +27,12 @@ pandas==0.19.0 # # please update the .travis file!
paramiko==2.0.2
partd==0.3.6
psutil==4.4.2
psycopg2==2.6.2
psycopg2==2.7.4
py==1.4.34
pyasn1==0.1.9
pycparser==2.17
pymongo==2.9.4
PyMySQL==0.7.9
PyMySQL==0.7.11
pytest==3.2.3
python-dateutil==2.5.3
pytz==2016.7
@@ -43,7 +43,7 @@ s3fs==0.0.7
s3transfer==0.1.9
sas7bdat==2.0.7
six==1.10.0
SQLAlchemy==1.1.3
SQLAlchemy==1.2.1
tables==3.3.0
tblib==1.3.0
toolz==0.8.0
2 changes: 1 addition & 1 deletion odo/backends/pandas.py
@@ -22,7 +22,7 @@


def dshape_from_pandas(col):
if isinstance(col.dtype, categorical):
if col.dtype.name == 'category':
return Categorical(col.cat.categories.tolist())
elif col.dtype.kind == 'M':
tz = getattr(col.dtype, 'tz', None)
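The switch above from isinstance(col.dtype, categorical) to col.dtype.name == 'category' reflects pandas moving its categorical dtype to the public CategoricalDtype class; comparing the dtype name works across the versions this PR targets. A minimal sketch, assuming pandas >= 0.21 where CategoricalDtype is public:

import pandas as pd

s = pd.Series(['a', 'b', 'a'], dtype='category')

# The dtype name stays 'category' across pandas versions, which is why
# the string comparison is the more robust check.
assert s.dtype.name == 'category'
assert isinstance(s.dtype, pd.CategoricalDtype)

# The categories themselves are what dshape_from_pandas feeds into
# datashape's Categorical type.
print(s.cat.categories.tolist())  # ['a', 'b']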
2 changes: 2 additions & 0 deletions odo/backends/tests/test_bokeh.py
@@ -4,6 +4,8 @@
bokeh = pytest.importorskip('bokeh')

from odo.backends.bokeh import convert, pd, ColumnDataSource
import numpy as np
import numpy.testing as nt
import pandas.util.testing as tm


12 changes: 6 additions & 6 deletions odo/backends/tests/test_dask_array.py
@@ -59,14 +59,14 @@ def test_into_inplace():


def test_insert_to_ooc():
x = np.arange(600).reshape((20, 30))
y = np.empty(shape=x.shape, dtype=x.dtype)
a = convert(Array, x, chunks=(4, 5))
in_arr = np.arange(600).reshape((20, 30))
out_arr = np.empty(shape=in_arr.shape, dtype=in_arr.dtype)
d_arr = convert(Array, in_arr, chunks=(4, 5))

dsk = insert_to_ooc(y, a)
core.get(merge(dsk, a.dask), list(dsk.keys()))
dsk = insert_to_ooc(d_arr, out_arr)
core.get(merge(dsk, d_arr.dask), list(dsk.keys()))

assert eq(y, x)
assert eq(out_arr, in_arr)


def test_array_interface():
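The renamed test above round-trips an in-memory array through a chunked dask Array and back into a preallocated output. For readers unfamiliar with odo's internal insert_to_ooc helper, the same round trip can be sketched with the public dask.array API (this is the analogous pattern, not the code under test):

import numpy as np
import dask.array as da

in_arr = np.arange(600).reshape((20, 30))
out_arr = np.empty_like(in_arr)

# Chunk the source, then write each chunk back into the target, which is
# what the out-of-core insertion graph in the test builds by hand.
d_arr = da.from_array(in_arr, chunks=(4, 5))
da.store(d_arr, out_arr)

assert (out_arr == in_arr).all()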
2 changes: 1 addition & 1 deletion odo/backends/tests/test_dask_dataframe.py
@@ -24,7 +24,7 @@ def test_discover():
ddf = dd.from_pandas(df, npartitions=2)
assert_dshape_equal(discover(ddf),
var * Record([('x', Categorical(['a', 'b', 'c'])),
('y', int64), ('z', float64)]))
('y', int64), ('z', float64)]))
assert_dshape_equal(discover(ddf.x), var * Categorical(['a', 'b', 'c']))


28 changes: 15 additions & 13 deletions odo/backends/tests/test_mysql.py
@@ -1,23 +1,25 @@
from __future__ import absolute_import, division, print_function

import pytest

pymysql = pytest.importorskip('pymysql')

from datashape import var, DataShape, Record, dshape
import csv as csv_module
import getpass
import itertools
from odo.backends.csv import CSV
from odo import resource, odo
import sqlalchemy
import sqlalchemy as sa
import os
import sys
import csv as csv_module
import getpass
from odo import drop, discover

import pytest
import sqlalchemy
import sqlalchemy as sa
from datashape import var, Record, dshape

from odo import resource, odo, drop, discover
from odo.backends.csv import CSV
from odo.utils import tmpfile


pymysql = pytest.importorskip('pymysql')



pytestmark = pytest.mark.skipif(sys.platform == 'win32',
reason='not well tested on win32 mysql')

@@ -232,7 +234,7 @@ def test_different_encoding(name):
sql = odo(os.path.join(os.path.dirname(__file__), 'encoding.csv'),
url + '::%s' % name,
encoding=encoding)
except sa.exc.OperationalError as e:
except (sa.exc.OperationalError, pymysql.err.InternalError) as e:
pytest.skip(str(e))
else:
try:
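The broadened except clause above makes the test skip rather than fail when the MySQL server rejects the load, whether the error surfaces through SQLAlchemy or through the pymysql driver directly. A minimal sketch of that pattern (load_or_skip and do_load are illustrative names, not part of this PR):

import pytest
import sqlalchemy as sa

pymysql = pytest.importorskip('pymysql')


def load_or_skip(do_load):
    # Run a server-dependent operation; treat server-side refusals as a
    # reason to skip the test instead of reporting a failure.
    try:
        return do_load()
    except (sa.exc.OperationalError, pymysql.err.InternalError) as e:
        pytest.skip(str(e))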
2 changes: 1 addition & 1 deletion odo/tests/test_convert.py
@@ -165,7 +165,7 @@ def test_numpy_launders_python_types():

def test_numpy_asserts_type_after_dataframe():
df = pd.DataFrame({'name': ['Alice'], 'amount': [100]})
ds = datashape.dshape('1 * {name: string[10, "ascii"], amount: int32}')
ds = datashape.dshape('1 * {amount: int32, name: string[10, "ascii"]}')
x = convert(np.ndarray, df, dshape=ds)
assert discover(x) == ds

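The reordered fields in the expected dshape above line up with how the DataFrame is built: on the pandas versions this PR pins (before 0.23), a DataFrame constructed from a plain dict sorts its columns alphabetically, so 'amount' comes before 'name'. A quick check, assuming pandas < 0.23:

import pandas as pd

df = pd.DataFrame({'name': ['Alice'], 'amount': [100]})

# Dict keys are sorted at construction time in pandas < 0.23, so the
# discovered dshape lists 'amount' first.
print(list(df.columns))  # ['amount', 'name']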