13 changes: 6 additions & 7 deletions .travis.yml
@@ -3,7 +3,7 @@ language: python
sudo: false
env:
global:
- PYTHON_VERSION=3.5
- PYTHON_VERSION=3.6
matrix:
- TEST_ADD_STUDIES=False COVER_PACKAGE=qiita_db
- TEST_ADD_STUDIES=False COVER_PACKAGE=qiita_pet
@@ -22,18 +22,17 @@ before_install:
install:
# install a few of the dependencies that pip would otherwise try to install
# when installing scikit-bio
- travis_retry conda create -q --yes -n qiita python=2.7 pip nose flake8
pyzmq 'networkx<2.0' pyparsing natsort mock future libgfortran seaborn nltk
'pandas>=0.18' 'matplotlib>=1.1.0' 'scipy>0.13.0' 'numpy>=1.7' 'h5py>=2.3.1'
- travis_retry conda create -q --yes -n qiita python=3.6 pip nose flake8
pyzmq networkx pyparsing natsort mock future libgfortran seaborn nltk pandas
matplotlib scipy numpy h5py
- source activate qiita
- pip install pip==18.1
- pip install sphinx sphinx-bootstrap-theme nose-timer codecov 'Click==6.7'
- 'echo "backend: Agg" > matplotlibrc'
- pip install sphinx sphinx-bootstrap-theme nose-timer codecov Click
- git clone https://github.com/nicolasff/webdis
- pushd webdis
- make
- ./webdis &
- popd
# export PATH="$PATH:/Applications/Postgres.app/Contents/Versions/9.5/bin/"
- travis_retry pip install . --process-dependency-links
# loading redbiom with Qiita's test set
# but first let's make sure redis is empty
5 changes: 2 additions & 3 deletions INSTALL.md
@@ -21,7 +21,7 @@ conda update conda
Setup a virtual environment in conda named `qiita` by executing the following:

```bash
conda create --yes --name qiita python=2.7 pip==18.1 nose flake8 pyzmq networkx pyparsing natsort mock future libgfortran seaborn 'pandas>=0.18' 'matplotlib>=1.1.0' 'scipy>0.13.0' 'numpy>=1.7' 'h5py>=2.3.1' hdf5
conda create --yes --name qiita python=3.6 pip==18.1 nose flake8 pyzmq networkx pyparsing natsort mock future libgfortran seaborn 'pandas>=0.18' 'matplotlib>=1.1.0' 'scipy>0.13.0' 'numpy>=1.7' 'h5py>=2.3.1' hdf5
```

If you receive an error message about conda being unable to find one of the specified packages in its repository, you will have to manually find the appropriate conda channel that they belong to (see troubleshooting section below).
@@ -323,8 +323,7 @@ Now you can re-run your `conda create` command:

### python

As a general rule of thumb you will want to have an updated version of Python
2.7 and a specific version of pip (`pip install pip==18.1` will do the trick).
As a general rule of thumb you will want to have an updated version of Python 3.6.

H5PY is known to cause a few problems, however their [installation
instructions](http://docs.h5py.org/en/latest/build.html) are a great resource
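Since the instructions now target Python 3.6, it can be worth confirming that the activated `qiita` environment really provides that interpreter before continuing; a hypothetical one-off check, not part of the documented install steps:

```python
import sys

# Hypothetical sanity check: the updated instructions assume Python 3.6+.
assert sys.version_info >= (3, 6), "activate the qiita conda env (python=3.6) first"
print(sys.version)
```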
6 changes: 3 additions & 3 deletions qiita_core/support_files/config_test.cfg
@@ -26,16 +26,16 @@ REQUIRE_APPROVAL = True
BASE_URL = https://localhost:21174

# Download path files
UPLOAD_DATA_DIR = /home/travis/miniconda3/envs/qiita/lib/python2.7/site-packages/qiita_db/support_files/test_data/uploads/
UPLOAD_DATA_DIR = /home/travis/miniconda3/envs/qiita/lib/python3.6/site-packages/qiita_db/support_files/test_data/uploads/

# Working directory path
WORKING_DIR = /home/travis/miniconda3/envs/qiita/lib/python2.7/site-packages/qiita_db/support_files/test_data/working_dir/
WORKING_DIR = /home/travis/miniconda3/envs/qiita/lib/python3.6/site-packages/qiita_db/support_files/test_data/working_dir/

# Maximum upload size (in Gb)
MAX_UPLOAD_SIZE = 100

# Path to the base directory where the data files are going to be stored
BASE_DATA_DIR = /home/travis/miniconda3/envs/qiita/lib/python2.7/site-packages/qiita_db/support_files/test_data/
BASE_DATA_DIR = /home/travis/miniconda3/envs/qiita/lib/python3.6/site-packages/qiita_db/support_files/test_data/

# Valid upload extension, comma separated. Empty for no uploads
VALID_UPLOAD_EXTENSION = fastq,fastq.gz,txt,tsv,sff,fna,qual
40 changes: 20 additions & 20 deletions qiita_db/__init__.py
@@ -6,26 +6,26 @@
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------

import base
import util
import sql_connection
import metadata_template
import analysis
import artifact
import archive
import commands
import environment_manager
import exceptions
import investigation
import logger
import meta_util
import ontology
import portal
import reference
import software
import study
import user
import processing_job
from . import base
from . import util
from . import sql_connection
from . import metadata_template
from . import analysis
from . import artifact
from . import archive
from . import commands
from . import environment_manager
from . import exceptions
from . import investigation
from . import logger
from . import meta_util
from . import ontology
from . import portal
from . import reference
from . import software
from . import study
from . import user
from . import processing_job

__version__ = "0.2.0-dev"

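The switch from bare `import base` to `from . import base` is needed because Python 3 removes implicit relative imports: a bare `import base` inside a package no longer falls back to the package's own directory. A minimal sketch of the pattern with a hypothetical `mypkg` package (the same change is applied to `qiita_db/metadata_template/__init__.py` further down):

```python
# mypkg/__init__.py -- hypothetical package, shown only to illustrate the pattern.
# Python 2 resolved a bare `import base` against the package directory first
# (implicit relative import); Python 3 searches only sys.path, so the bare form
# raises ModuleNotFoundError. The explicit relative form works under Python 3:
from . import base        # loads mypkg/base.py
from . import util        # loads mypkg/util.py

__all__ = ["base", "util"]
```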
2 changes: 1 addition & 1 deletion qiita_db/artifact.py
@@ -1169,7 +1169,7 @@ def _add_edge(edges, src, dest):
# status. Approach: Loop over all the artifacts and add all the
# jobs that have been attached to them.
visited = set()
queue = nodes.keys()
queue = list(nodes.keys())
while queue:
current = queue.pop(0)
if current not in visited:
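The `list()` wrapper is needed because `dict.keys()` returns a view object in Python 3, and a view has no `pop()`, so it cannot serve as a mutable work queue. A small self-contained sketch of the idea, using a stand-in dict rather than Qiita's artifact graph:

```python
# Stand-in dict; in qiita_db.artifact the keys are artifact nodes.
nodes = {'a': 1, 'b': 2, 'c': 3}

queue = list(nodes.keys())   # Python 3: keys() is a view; list() makes it pop-able
visited = set()
while queue:
    current = queue.pop(0)   # would raise AttributeError on a dict_keys view
    if current not in visited:
        visited.add(current)

print(sorted(visited))       # ['a', 'b', 'c']
```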
6 changes: 2 additions & 4 deletions qiita_db/base.py
@@ -175,17 +175,15 @@ def __init__(self, id_):
# the User object) are strings. Moreover, some integer IDs are passed
# as strings (e.g., '5'). Therefore, explicit type-checking is needed
# here to accommodate these possibilities.
if not isinstance(id_, (int, long, str, unicode)):
if not isinstance(id_, (int, str)):
raise TypeError("id_ must be a numerical or text type (not %s) "
"when instantiating "
"%s" % (id_.__class__.__name__,
self.__class__.__name__))

if isinstance(id_, (str, unicode)):
if isinstance(id_, (str)):
if id_.isdigit():
id_ = int(id_)
elif isinstance(id_, long):
id_ = int(id_)

with qdb.sql_connection.TRN:
self._check_subclass()
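Python 3 folds `long` into `int` and `unicode` into `str`, so the accepted types collapse to `(int, str)` and the separate `long` branch disappears. A rough, simplified sketch of the resulting check (not the exact `qiita_db.base` code path):

```python
def normalize_id(id_):
    """Accept ints or digit strings, mirroring the narrowed Python 3 check."""
    if not isinstance(id_, (int, str)):        # no long / unicode types in Python 3
        raise TypeError("id_ must be a numerical or text type (not %s)"
                        % id_.__class__.__name__)
    if isinstance(id_, str) and id_.isdigit():
        id_ = int(id_)                         # '5' -> 5, as before
    return id_

print(normalize_id('5'), normalize_id(7))      # 5 7
```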
9 changes: 5 additions & 4 deletions qiita_db/environment_manager.py
@@ -78,7 +78,7 @@ def _populate_test_db():


def _add_ontology_data():
print ('Loading Ontology Data')
print('Loading Ontology Data')
if not exists(reference_base_dir):
mkdir(reference_base_dir)

@@ -309,8 +309,8 @@ def drop_environment(ask_for_confirmation):
if ask_for_confirmation:
confirm = ''
while confirm not in ('Y', 'y', 'N', 'n'):
confirm = raw_input("THIS IS NOT A TEST ENVIRONMENT.\n"
"Proceed with drop? (y/n)")
confirm = input("THIS IS NOT A TEST ENVIRONMENT.\n"
"Proceed with drop? (y/n)")

do_drop = confirm in ('Y', 'y')
else:
@@ -438,7 +438,8 @@ def patch(patches_dir=PATCHES_DIR, verbose=False, test=False):
if verbose:
print('\t\tApplying python patch %s...'
% py_patch_filename)
execfile(py_patch_fp, {})
with open(py_patch_fp) as py_patch:
exec(py_patch.read(), globals())

# before moving to jsonb for sample/prep info files (patch 69.sql),
# one of the patches used to regenerate the sample information file
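Besides `print()` becoming a function and `raw_input()` becoming `input()`, the notable change here is that `execfile()` no longer exists in Python 3; reading the file and passing its contents to `exec()` is the usual replacement. A throwaway sketch of that pattern, using a temporary script rather than one of Qiita's real patch files:

```python
import os
import tempfile

# Create a throwaway "patch" script to stand in for py_patch_fp.
fd, py_patch_fp = tempfile.mkstemp(suffix='.py')
with os.fdopen(fd, 'w') as fh:
    fh.write("print('patch applied')\n")

# Python 3 replacement for execfile(py_patch_fp, {}).
with open(py_patch_fp) as py_patch:
    exec(py_patch.read(), globals())

os.remove(py_patch_fp)
```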
27 changes: 16 additions & 11 deletions qiita_db/handlers/oauth2.py
@@ -90,13 +90,14 @@ def wrapper(handler, *args, **kwargs):
'invalid_grant')
return
# Check daily rate limit for key if password style key
if db_token['grant_type'] == 'password':
limit_key = '%s_%s_daily_limit' % (db_token['client_id'],
db_token['user'])
if db_token[b'grant_type'] == b'password':
limit_key = '%s_%s_daily_limit' % (
db_token[b'client_id'].decode('ascii'),
db_token[b'user'].decode('ascii'))
limiter = r_client.get(limit_key)
if limiter is None:
# Set limit to 5,000 requests per day
r_client.setex(limit_key, 5000, 86400)
r_client.setex(limit_key, 86400, 5000)
else:
r_client.decr(limit_key)
if int(r_client.get(limit_key)) <= 0:
@@ -212,19 +213,23 @@ def set_token(self, client_id, grant_type, user=None, timeout=3600):
"""
token = self.generate_access_token()

r_client.hset(token, 'timestamp', datetime.datetime.now())
r_client.hset(token, 'client_id', client_id)
r_client.hset(token, 'grant_type', grant_type)
r_client.expire(token, timeout)
token_info = {
'timestamp': datetime.datetime.now().strftime('%m-%d-%y %H:%M:%S'),
'client_id': client_id,
'grant_type': grant_type
}
if user:
r_client.hset(token, 'user', user)
token_info['user'] = user

r_client.hmset(token, token_info)
r_client.expire(token, timeout)
if grant_type == 'password':
# Check if client has access limit key, and if not, create it
limit_key = '%s_%s_daily_limit' % (client_id, user)
limiter = r_client.get(limit_key)
if limiter is None:
# Set limit to 5,000 requests per day
r_client.setex(limit_key, 5000, 86400)
r_client.setex(limit_key, 86400, 5000)

self.write({'access_token': token,
'token_type': 'Bearer',
@@ -367,7 +372,7 @@ def post(self):
return
try:
client_id, client_secret = urlsafe_b64decode(
header_info[1]).split(':')
header_info[1]).decode('ascii').split(':')
except ValueError:
# Split didn't work, so invalid information sent
_oauth_error(self, 'Oauth2 error: invalid base64 encoded info',
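Two Python 3 details drive most of these changes: redis-py returns byte strings unless the client is created with `decode_responses=True` (hence the `b'...'` keys and the `.decode('ascii')` calls), and `StrictRedis.setex` takes the TTL before the value, hence the swapped arguments. A small sketch assuming the redis-py package and a Redis server on localhost, not part of Qiita itself:

```python
import redis

r = redis.StrictRedis()  # assumes a local Redis server

# setex signature here is (name, time, value): TTL first, value second.
r.setex('example_daily_limit', 86400, 5000)

# hmset mirrors the call in the diff; newer redis-py versions replace it
# with r.hset('example_token', mapping={...}).
r.hmset('example_token', {'client_id': 'abc', 'grant_type': 'password'})

token = r.hgetall('example_token')
# Without decode_responses=True, keys and values come back as bytes.
assert token[b'grant_type'] == b'password'
print(token[b'client_id'].decode('ascii'))   # 'abc'
```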
82 changes: 43 additions & 39 deletions qiita_db/handlers/tests/test_oauth2.py
@@ -17,20 +17,29 @@ class OAuth2BaseHandlerTests(TestHandlerBase):
def setUp(self):
# Create client test authentication token
self.client_token = 'SOMEAUTHTESTINGTOKENHERE2122'
r_client.hset(self.client_token, 'timestamp', '12/12/12 12:12:00')
r_client.hset(self.client_token, 'client_id', 'test123123123')
r_client.hset(self.client_token, 'grant_type', 'client')
token_info = {
'timestamp': '12/12/12 12:12:00',
'client_id': 'test123123123',
'grant_type': 'client'
}
r_client.hmset(self.client_token, token_info)
r_client.expire(self.client_token, 5)

# Create username test authentication token
self.user_token = 'SOMEAUTHTESTINGTOKENHEREUSERNAME'
r_client.hset(self.user_token, 'timestamp', '12/12/12 12:12:00')
r_client.hset(self.user_token, 'client_id', 'testuser')
r_client.hset(self.user_token, 'grant_type', 'password')
r_client.hset(self.user_token, 'user', 'test@foo.bar')
token_info = {
'timestamp': '12/12/12 12:12:00',
'client_id': 'testuser',
'grant_type': 'password',
'user': 'test@foo.bar'
}
r_client.hmset(self.user_token, token_info)
r_client.expire(self.user_token, 5)

# Create test access limit token
self.user_rate_key = 'testuser_test@foo.bar_daily_limit'
r_client.setex(self.user_rate_key, 2, 5)
r_client.setex(self.user_rate_key, 5, 2)
super(OAuth2BaseHandlerTests, self).setUp()

def test_authenticate_header_client(self):
@@ -45,7 +54,7 @@ def test_authenticate_header_username(self):

# Check rate limiting works
self.assertEqual(int(r_client.get(self.user_rate_key)), 1)
r_client.setex('testuser_test@foo.bar_daily_limit', 0, 2)
r_client.setex('testuser_test@foo.bar_daily_limit', 1, 0)
obs = self.get('/qiita_db/artifacts/100/', headers={
'Authorization': 'Bearer ' + self.user_token})
exp = {'error': 'invalid_grant',
@@ -88,20 +97,20 @@ def test_authenticate_client_header(self):
'pLaEFtbUNXWnVhYmUwTzVNcDI4czE='})
self.assertEqual(obs.code, 200)
obs_body = loads(obs.body)
exp = {'access_token': 'token',
exp = {'access_token': obs_body['access_token'],
'token_type': 'Bearer',
'expires_in': 3600}
self.assertItemsEqual(obs_body.keys(), exp.keys())
self.assertEqual(obs_body['token_type'], exp['token_type'])
self.assertEqual(obs_body['expires_in'], exp['expires_in'])
self.assertEqual(len(obs_body['access_token']), 55)
self.assertEqual(type(obs_body['access_token']), unicode)
self.assertDictEqual(obs_body, exp)

# Make sure token in system with proper ttl
token = r_client.hgetall(obs_body['access_token'])
self.assertNotEqual(token, {})
self.assertItemsEqual(token.keys(), ['timestamp', 'client_id',
'grant_type'])
exp = {
b'timestamp': token[b'timestamp'],
b'client_id': (b'19ndkO3oMKsoChjVVWluF7QkxHRfYhTKSFbAV'
b't8IhK7gZgDaO4'),
b'grant_type': b'client'
}
self.assertDictEqual(token, exp)
self.assertEqual(r_client.ttl(obs_body['access_token']), 3600)

def test_authenticate_client_post(self):
@@ -115,21 +124,20 @@ def test_authenticate_client_post(self):
'KhAmmCWZuabe0O5Mp28s1'})
self.assertEqual(obs.code, 200)
obs_body = loads(obs.body)
exp = {'access_token': 'placeholder',
exp = {'access_token': obs_body['access_token'],
'token_type': 'Bearer',
'expires_in': 3600}
self.assertItemsEqual(obs_body.keys(), exp.keys())
self.assertEqual(obs_body['token_type'], exp['token_type'])
self.assertEqual(obs_body['expires_in'], exp['expires_in'])
self.assertEqual(len(obs_body['access_token']), 55)
self.assertEqual(type(obs_body['access_token']), unicode)
self.assertDictEqual(obs_body, exp)

# Make sure token in system with proper ttl
token = r_client.hgetall(obs_body['access_token'])
self.assertNotEqual(token, {})
self.assertItemsEqual(token.keys(), ['timestamp', 'client_id',
'grant_type'])
self.assertEqual(token['grant_type'], 'client')
exp = {
b'timestamp': token[b'timestamp'],
b'client_id': (b'19ndkO3oMKsoChjVVWluF7QkxHRfYhTKSFbAVt8'
b'IhK7gZgDaO4'),
b'grant_type': b'client'
}
self.assertDictEqual(token, exp)
self.assertEqual(r_client.ttl(obs_body['access_token']), 3600)

def test_authenticate_client_bad_base64_hash(self):
@@ -208,22 +216,18 @@ def test_authenticate_password(self):
'password': 'password'})
self.assertEqual(obs.code, 200)
obs_body = loads(obs.body)
exp = {'access_token': 'placeholder',
exp = {'access_token': obs_body['access_token'],
'token_type': 'Bearer',
'expires_in': 3600}
self.assertItemsEqual(obs_body.keys(), exp.keys())
self.assertEqual(obs_body['token_type'], exp['token_type'])
self.assertEqual(obs_body['expires_in'], exp['expires_in'])
self.assertEqual(len(obs_body['access_token']), 55)
self.assertEqual(type(obs_body['access_token']), unicode)
self.assertDictEqual(obs_body, exp)

# Make sure token in system with proper ttl
token = r_client.hgetall(obs_body['access_token'])
self.assertNotEqual(token, {})
self.assertItemsEqual(token.keys(), ['timestamp', 'user', 'client_id',
'grant_type'])
self.assertEqual(token['user'], 'test@foo.bar')
self.assertEqual(token['grant_type'], 'password')
exp = {b'timestamp': token[b'timestamp'],
b'user': b'test@foo.bar',
b'client_id': token[b'client_id'],
b'grant_type': b'password'}
self.assertDictEqual(token, exp)
self.assertEqual(r_client.ttl(obs_body['access_token']), 3600)

def test_authenticate_password_non_user_client_id_header(self):
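`assertItemsEqual` does not exist in Python 3's `unittest` (its closest replacement is `assertCountEqual`), and hashes read back from redis compare as byte-keyed dicts, which is what the rewritten assertions reflect. A toy test illustrating both points, not part of Qiita's suite:

```python
import unittest


class ByteKeyExample(unittest.TestCase):
    """Hypothetical test showing the Python 3 assertion style used above."""

    def test_token_shape(self):
        token = {b'client_id': b'abc', b'grant_type': b'password'}
        # assertItemsEqual is gone in Python 3; assertCountEqual checks the
        # same "same elements, any order" property on the keys...
        self.assertCountEqual(token.keys(), [b'client_id', b'grant_type'])
        # ...while assertDictEqual pins down the full byte-keyed mapping.
        self.assertDictEqual(token, {b'client_id': b'abc',
                                     b'grant_type': b'password'})


if __name__ == '__main__':
    unittest.main()
```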
4 changes: 2 additions & 2 deletions qiita_db/meta_util.py
@@ -30,8 +30,8 @@
import matplotlib.pyplot as plt
import matplotlib as mpl
from base64 import b64encode
from urllib import quote
from StringIO import StringIO
from urllib.parse import quote
from io import StringIO
from future.utils import viewitems
from datetime import datetime
from tarfile import open as topen, TarInfo
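Both imports simply moved in the Python 3 standard library: `quote` now lives in `urllib.parse` and the text-mode `StringIO` class lives in `io`. A stand-alone illustration (not Qiita code):

```python
from io import StringIO            # was: from StringIO import StringIO
from urllib.parse import quote     # was: from urllib import quote

buf = StringIO()
buf.write(quote('sample id/1'))    # space is percent-encoded; '/' is kept by default
print(buf.getvalue())              # sample%20id/1
```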
8 changes: 4 additions & 4 deletions qiita_db/metadata_template/__init__.py
@@ -6,9 +6,9 @@
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------

import constants
import util
import sample_template
import prep_template
from . import constants
from . import util
from . import sample_template
from . import prep_template

__all__ = ["sample_template", "prep_template", "util", "constants"]