
Commit 1b0f99e

Author: Saul Frank
update naming convention for transfers
1 parent 0a6d86c commit 1b0f99e

7 files changed (+26, -28 lines)


pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "dataplane"
-version = "0.0.12"
+version = "0.0.13"
 authors = [
   { name="Saul Frank" }, {name ="Dataplane, Inc."}
 ]

src/dataplane/Microsoft/Teams/webhook_send.py

Lines changed: 3 additions & 5 deletions
@@ -1,8 +1,3 @@
-import requests
-import os
-from datetime import datetime
-import json
-
 """ Url: Webhook url generated by Teams
 Message: Json message to send
 ProxyUse: Whether to use a proxy, true or false
@@ -11,6 +6,9 @@
 """
 def teams_webhook_send(Url, Message, ProxyUse=False, ProxyUrl="", ProxyMethod="https"):
 
+    import requests
+    from datetime import datetime
+
     # Start the timer
     start = datetime.now()
 
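For reference, calling this helper is unchanged by the commit; only the imports moved inside the function. A minimal sketch, assuming the top-level re-export listed in __init__.py's __all__, a placeholder webhook URL, and a simple text payload (the exact Teams card schema is not shown in this diff):

    from dataplane import teams_webhook_send

    # Placeholder URL - use one generated by Teams for your channel.
    url = "https://example.webhook.office.com/..."

    # Minimal JSON message; richer card layouts are an assumption here.
    message = {"text": "Pipeline run finished"}

    teams_webhook_send(Url=url, Message=message, ProxyUse=False)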

src/dataplane/__init__.py

Lines changed: 10 additions & 10 deletions
@@ -1,11 +1,11 @@
-from dataplane.pipelinerun.data_persist.redis_store import (
-    pipeline_redis_store,
-    pipeline_redis_get,
+from dataplane.pipelinerun.data_persist.pandas_redis_store import (
+    pipeline_pandas_redis_store,
+    pipeline_pandas_redis_get,
 )
 
-from dataplane.pipelinerun.data_persist.s3_store import (
-    pipeline_s3_get,
-    pipeline_s3_store,
+from dataplane.pipelinerun.data_persist.pandas_s3_store import (
+    pipeline_pandas_s3_get,
+    pipeline_pandas_s3_store,
 )
 
 from dataplane.hello import (
@@ -24,10 +24,10 @@
     "hello",
 
     # Pipeline transfers
-    "pipeline_redis_store",
-    "pipeline_redis_get",
-    "pipeline_s3_get",
-    "pipeline_s3_store",
+    "pipeline_pandas_redis_store",
+    "pipeline_pandas_redis_get",
+    "pipeline_pandas_s3_get",
+    "pipeline_pandas_s3_store",
 
     # Microsoft connectors
     "teams_webhook_send",

src/dataplane/pipelinerun/data_persist/redis_store.py renamed to src/dataplane/pipelinerun/data_persist/pandas_redis_store.py

Lines changed: 2 additions & 2 deletions
@@ -17,7 +17,7 @@ def RedisCheck(r):
     Expire: Expires the data if true.
     ExpireDuration: If expires is true, how much time to expire. Default 15 mins
 """
-def pipeline_redis_store(StoreKey, DataFrame, Redis, Expire=True, ExpireDuration=timedelta(minutes=15)):
+def pipeline_pandas_redis_store(StoreKey, DataFrame, Redis, Expire=True, ExpireDuration=timedelta(minutes=15)):
 
     import os
     import io
@@ -50,7 +50,7 @@ def pipeline_redis_store(StoreKey, DataFrame, Redis, Expire=True, ExpireDuration
     StoreKey: is the key to look up for retrieval (set with RedisStore).
     Redis: e.g. Redis = redis.Redis(host='redis-service', port=6379, db=0)
 """
-def pipeline_redis_get(StoreKey, Redis):
+def pipeline_pandas_redis_get(StoreKey, Redis):
 
     import os
     import io
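The renamed Redis pair keeps the same signatures, so only the function names change for callers. A minimal usage sketch based on the docstrings and tests in this commit (the connection details and key are placeholders):

    import redis
    import pandas as pd
    from datetime import timedelta
    from dataplane import pipeline_pandas_redis_store, pipeline_pandas_redis_get

    # Illustrative connection - host/port/db are placeholders.
    r = redis.Redis(host='redis-service', port=6379, db=0)

    df = pd.DataFrame({"value": [1, 2, 3]})

    # Store the frame under a key - the run id is attached by the library.
    rs = pipeline_pandas_redis_store(StoreKey="hello", DataFrame=df, Redis=r,
                                     Expire=True, ExpireDuration=timedelta(minutes=15))
    assert rs["result"] == "OK"

    # Retrieve it again.
    rsget = pipeline_pandas_redis_get(StoreKey="hello", Redis=r)
    df_back = rsget["dataframe"]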

src/dataplane/pipelinerun/data_persist/s3_store.py renamed to src/dataplane/pipelinerun/data_persist/pandas_s3_store.py

Lines changed: 2 additions & 2 deletions
@@ -10,7 +10,7 @@
     Expire: Expires the data if true.
     ExpireDuration: If expires is true, how much time to expire. Default 15 mins
 """
-def pipeline_s3_store(StoreKey, DataFrame, S3Client, Bucket, Expire=True, ExpireDuration=timedelta(days=30)):
+def pipeline_pandas_s3_store(StoreKey, DataFrame, S3Client, Bucket, Expire=True, ExpireDuration=timedelta(days=30)):
 
     import os
     from io import BytesIO
@@ -35,7 +35,7 @@ def pipeline_s3_store(StoreKey, DataFrame, S3Client, Bucket, Expire=True, Expire
     S3Client: e.g. S3Client = boto3.client(...)
     Bucket: Name of the s3 bucket
 """
-def pipeline_s3_get(StoreKey, S3Client, Bucket):
+def pipeline_pandas_s3_get(StoreKey, S3Client, Bucket):
 
     import os
     from io import BytesIO
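Likewise for the S3 pair; a short sketch mirroring the test below, with a placeholder boto3 client and bucket (credentials and endpoint are assumptions):

    import boto3
    import pandas as pd
    from dataplane import pipeline_pandas_s3_store, pipeline_pandas_s3_get

    # Placeholder client and bucket - region/credentials come from your environment.
    S3Client = boto3.client("s3")
    bucket = "my-bucket"

    df = pd.DataFrame({"value": [1, 2, 3]})

    rs = pipeline_pandas_s3_store(StoreKey="s3me", DataFrame=df, S3Client=S3Client,
                                  Bucket=bucket, Expire=False)
    assert rs["result"] == "OK"

    rsget = pipeline_pandas_s3_get(StoreKey="s3me", S3Client=S3Client, Bucket=bucket)
    df_back = rsget["dataframe"]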

src/dataplane/pipelinerun/data_persist/test_redis.py

Lines changed: 4 additions & 4 deletions
@@ -1,7 +1,7 @@
 
 import os
-from .redis_store import pipeline_redis_store
-from .redis_store import pipeline_redis_get
+from .pandas_redis_store import pipeline_pandas_redis_store
+from .pandas_redis_store import pipeline_pandas_redis_get
 import redis
 from datetime import timedelta
 from nanoid import generate
@@ -29,14 +29,14 @@ def test_redis_store():
     # ---------- STORE PARQUET TO REDIS ------------
 
     # Store the data with key hello - run id will be attached
-    rs = pipeline_redis_store(StoreKey="hello", DataFrame=df, Redis=redisConnect, Expire=True, ExpireDuration=timedelta(minutes=15))
+    rs = pipeline_pandas_redis_store(StoreKey="hello", DataFrame=df, Redis=redisConnect, Expire=True, ExpireDuration=timedelta(minutes=15))
     print(rs)
     assert rs["result"]=="OK"
 
     # ---------- RETRIEVE PARQUET FROM REDIS ------------
 
     # Get the data
-    rsget = pipeline_redis_get(StoreKey="hello", Redis=redisConnect)
+    rsget = pipeline_pandas_redis_get(StoreKey="hello", Redis=redisConnect)
     print(rsget)
     df = rsget["dataframe"]
     print(df.shape[0])

src/dataplane/pipelinerun/data_persist/test_s3_store.py

Lines changed: 4 additions & 4 deletions
@@ -1,7 +1,7 @@
 
 import os
-from .s3_store import pipeline_s3_store
-from .s3_store import pipeline_s3_get
+from .pandas_s3_store import pipeline_pandas_s3_store
+from .pandas_s3_store import pipeline_pandas_s3_get
 import boto3
 from botocore.client import Config
 from nanoid import generate
@@ -38,14 +38,14 @@ def test_s3_store():
     # ---------- STORE PARQUET TO S3 ------------
 
     # Store the data with key hello - run id will be attached
-    rs = pipeline_s3_store(StoreKey="s3me", DataFrame=df, S3Client=S3Connect, Bucket=bucket, Expire=False)
+    rs = pipeline_pandas_s3_store(StoreKey="s3me", DataFrame=df, S3Client=S3Connect, Bucket=bucket, Expire=False)
     print(rs)
     assert rs["result"]=="OK"
 
     # ---------- RETRIEVE PARQUET FROM S3 ------------
 
     # Get the data
-    rsget = pipeline_s3_get(StoreKey="s3me", S3Client=S3Connect, Bucket=bucket)
+    rsget = pipeline_pandas_s3_get(StoreKey="s3me", S3Client=S3Connect, Bucket=bucket)
     print(rsget)
     df = rsget["dataframe"]
     print(df.shape[0])

0 commit comments
