from delphi_utils import Nans
from delphi.epidata.client.delphi_epidata import Epidata
from delphi.epidata.acquisition.covidcast.csv_to_database import main
-from delphi.epidata.acquisition.covidcast.dbjobs_runner import main as dbjobs_main
import delphi.operations.secrets as secrets

# py3tester coverage target (equivalent to `import *`)
@@ -37,9 +36,9 @@ def setUp(self):
        cur = cnx.cursor()

        # clear all tables
-        cur.execute("truncate table signal_load")
-        cur.execute("truncate table signal_history")
-        cur.execute("truncate table signal_latest")
+        cur.execute("truncate table epimetric_load")
+        cur.execute("truncate table epimetric_full")
+        cur.execute("truncate table epimetric_latest")
        cur.execute("truncate table geo_dim")
        cur.execute("truncate table signal_dim")
        # reset the `covidcast_meta_cache` table (it should always have one row)
@@ -79,9 +78,9 @@ def apply_lag(expected_epidata):

    def verify_timestamps_and_defaults(self):
        self.cur.execute('''
-            select value_updated_timestamp from signal_history
+            select value_updated_timestamp from epimetric_full
            UNION ALL
-            select value_updated_timestamp from signal_latest''')
+            select value_updated_timestamp from epimetric_latest''')
        for (value_updated_timestamp,) in self.cur:
            self.assertGreater(value_updated_timestamp, 0)

@@ -102,8 +101,6 @@ def test_uploading(self):
            log_file=log_file_directory +
            "output.log",
            data_dir=data_dir,
-            is_wip_override=False,
-            not_wip_override=False,
            specific_issue_date=False)
        uploader_column_rename = {"geo_id": "geo_value", "val": "value", "se": "stderr", "missing_val": "missing_value", "missing_se": "missing_stderr"}

@@ -123,7 +120,6 @@ def test_uploading(self):

            # upload CSVs
            main(args)
-            dbjobs_main()
            response = Epidata.covidcast('src-name', signal_name, 'day', 'state', 20200419, '*')

            expected_values = pd.concat([values, pd.DataFrame({"time_value": [20200419] * 3, "signal": [signal_name] * 3, "direction": [None] * 3})], axis=1).rename(columns=uploader_column_rename).to_dict(orient="records")
@@ -152,7 +148,6 @@ def test_uploading(self):

            # upload CSVs
            main(args)
-            dbjobs_main()
            response = Epidata.covidcast('src-name', signal_name, 'day', 'state', 20200419, '*')

            expected_values = pd.concat([values, pd.DataFrame({
@@ -187,7 +182,6 @@ def test_uploading(self):

            # upload CSVs
            main(args)
-            dbjobs_main()
            response = Epidata.covidcast('src-name', signal_name, 'day', 'state', 20200419, '*')

            expected_response = {'result': -2, 'message': 'no results'}
@@ -213,7 +207,6 @@ def test_uploading(self):

            # upload CSVs
            main(args)
-            dbjobs_main()
            response = Epidata.covidcast('src-name', signal_name, 'day', 'state', 20200419, '*')

            expected_values_df = pd.concat([values, pd.DataFrame({
@@ -232,42 +225,6 @@ def test_uploading(self):
            self.setUp()


-        with self.subTest("Valid wip"):
-            values = pd.DataFrame({
-                "geo_id": ["me", "nd", "wa"],
-                "val": [10.0, 20.0, 30.0],
-                "se": [0.01, 0.02, 0.03],
-                "sample_size": [100.0, 200.0, 300.0],
-                "missing_val": [Nans.NOT_MISSING] * 3,
-                "missing_se": [Nans.NOT_MISSING] * 3,
-                "missing_sample_size": [Nans.NOT_MISSING] * 3
-            })
-            signal_name = "wip_prototype"
-            values.to_csv(source_receiving_dir + f'/20200419_state_{signal_name}.csv', index=False)
-
-            # upload CSVs
-            main(args)
-            dbjobs_main()
-            response = Epidata.covidcast('src-name', signal_name, 'day', 'state', 20200419, '*')
-
-            expected_values = pd.concat([values, pd.DataFrame({
-                "time_value": [20200419] * 3,
-                "signal": [signal_name] * 3,
-                "direction": [None] * 3
-            })], axis=1).rename(columns=uploader_column_rename).to_dict(orient="records")
-            expected_response = {'result': 1, 'epidata': self.apply_lag(expected_values), 'message': 'success'}
-
-            self.assertEqual(response, expected_response)
-            self.verify_timestamps_and_defaults()
-
-            # Verify that files were archived
-            path = data_dir + f'/archive/successful/src-name/20200419_state_wip_prototype.csv.gz'
-            self.assertIsNotNone(os.stat(path))
-
-            self.tearDown()
-            self.setUp()
-
-
        with self.subTest("Valid signal with name length 32<x<64"):
            values = pd.DataFrame({
                "geo_id": ["pa"],
@@ -278,12 +235,11 @@ def test_uploading(self):
278235 "missing_se" : [Nans .NOT_MISSING ],
279236 "missing_sample_size" : [Nans .NOT_MISSING ]
280237 })
281- signal_name = "wip_really_long_name_that_will_be_accepted "
238+ signal_name = "really_long_name_that_will_be_accepted "
282239 values .to_csv (source_receiving_dir + f'/20200419_state_{ signal_name } .csv' , index = False )
283240
284241 # upload CSVs
285242 main (args )
286- dbjobs_main ()
287243 response = Epidata .covidcast ('src-name' , signal_name , 'day' , 'state' , 20200419 , '*' )
288244
289245 expected_values = pd .concat ([values , pd .DataFrame ({
@@ -310,12 +266,11 @@ def test_uploading(self):
310266 "missing_se" : [Nans .NOT_MISSING ],
311267 "missing_sample_size" : [Nans .NOT_MISSING ]
312268 })
313- signal_name = "wip_really_long_name_that_will_get_truncated_lorem_ipsum_dolor_sit_amet "
269+ signal_name = "really_long_name_that_will_get_truncated_lorem_ipsum_dolor_sit_amet "
314270 values .to_csv (source_receiving_dir + f'/20200419_state_{ signal_name } .csv' , index = False )
315271
316272 # upload CSVs
317273 main (args )
318- dbjobs_main ()
319274 response = Epidata .covidcast ('src-name' , signal_name , 'day' , 'state' , 20200419 , '*' )
320275
321276 expected_response = {'result' : - 2 , 'message' : 'no results' }
@@ -332,7 +287,6 @@ def test_uploading(self):
                f.write('this,header,is,wrong\n')

            main(args)
-            dbjobs_main()

            path = data_dir + '/archive/failed/src-name/20200420_state_test.csv'
            self.assertIsNotNone(os.stat(path))
@@ -346,7 +300,6 @@ def test_uploading(self):
                f.write('file name is wrong\n')

            main(args)
-            dbjobs_main()

            path = data_dir + '/archive/failed/unknown/hello.csv'
            self.assertIsNotNone(os.stat(path))