Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

i13 python api #58

Merged
merged 21 commits into from
Jan 25, 2018
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
21 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Added configuration to make test environment and outcomes consistent
  • Loading branch information
ppillay committed Jan 22, 2018
commit 59961602b0c5f02b2bb90e8ea054e2b930aff7af
1 change: 1 addition & 0 deletions dev/build.sh
Original file line number Diff line number Diff line change
Expand Up @@ -21,5 +21,6 @@ env | tee "target/buildinfo"
pushd ${FWDIR}/python
python setup.py sdist
python setup.py bdist_egg
python -m unittest discover -v
popd

3 changes: 2 additions & 1 deletion dev/dev-requirements.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
Sphinx>=1.3.5
pylint>=1.7.4
decorator>=4.1.2
typedecorator
pyspark>=2.2.1
typedecorator>=0.0.5
23 changes: 20 additions & 3 deletions python/variants/test/test_core.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,31 @@
import os
import unittest

from pyspark import SparkConf
from pyspark.sql import SparkSession
from pyspark.tests import ReusedPySparkTestCase

from variants import VariantsContext

THIS_DIR = os.path.dirname(os.path.abspath(__file__))

class VariantSparkAPITestCase(ReusedPySparkTestCase):

class VariantSparkPySparkTestCase(unittest.TestCase):
    """Base test case providing a shared SparkContext for variant-spark tests.

    Spark is configured with very large file open-cost / max-partition-byte
    thresholds (53687091200 bytes = 50 GiB) so that input-file partitioning
    is the same on every machine, keeping test outcomes consistent (per the
    commit intent: "make test environment and outcomes consistent").
    """

    @classmethod
    def setUpClass(cls):
        # FIX: dropped the Python 2 long-literal 'L' suffix (syntax error in
        # Python 3); plain ints auto-promote in Python 2, so behavior is the
        # same. Also renamed the receiver to the conventional 'cls' -- these
        # are classmethods and the attribute write lands on the class.
        sconf = SparkConf(loadDefaults=False)\
            .set("spark.sql.files.openCostInBytes", 53687091200)\
            .set("spark.sql.files.maxPartitionBytes", 53687091200)
        # Local single-threaded master keeps the test run self-contained.
        spark = SparkSession.builder.config(conf=sconf)\
            .appName("test").master("local").getOrCreate()
        # Shared by all tests in subclasses; torn down once per class.
        cls.sc = spark.sparkContext

    @classmethod
    def tearDownClass(cls):
        # Stop the shared context so later test classes can start a fresh one.
        cls.sc.stop()


class VariantSparkAPITestCase(VariantSparkPySparkTestCase):

def setUp(self):
self.spark = SparkSession(self.sc)
Expand All @@ -34,7 +51,7 @@ def test_importance_analysis_from_vcf(self):
self.assertEqual('22_16050408',
str(df.orderBy('importance', ascending=False).collect()[0][0]))
oob_error = imp_analysis.oob_error()
self.assertAlmostEqual(0.014652014652014652, oob_error, 3)
self.assertAlmostEqual(0.016483516483516484, oob_error, 4)


if __name__ == '__main__':
Expand Down