Skip to content

Commit

Permalink
Fix examples (#236)
Browse files Browse the repository at this point in the history
  • Loading branch information
dvadym authored Jan 31, 2022
1 parent 24d8a83 commit a69143b
Show file tree
Hide file tree
Showing 6 changed files with 19 additions and 4 deletions.
2 changes: 1 addition & 1 deletion examples/movie_view_ratings/run_all_frameworks.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
In order to run an example:
1. Install Python and run on the command line `pip install numpy apache-beam pyspark absl-py`
1. Install Python and run on the command line `pip install pipeline-dp apache-beam pyspark absl-py`
2. Download the Netflix prize dataset from https://www.kaggle.com/netflix-inc/netflix-prize-data and unpack it.
3. The dataset itself is pretty big, to speed up the run it's better to use a
part of it. You can get a part of it by running in bash:
Expand Down
5 changes: 5 additions & 0 deletions examples/movie_view_ratings/run_on_beam.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
""" Demo of PipelineDP with Apache Beam.
For running:
1. Install Python and run on the command line `pip install pipeline-dp apache-beam absl-py`
2. Run python run_on_beam.py --input_file=<path to data.txt from step 2> --output_file=<...>
"""

from absl import app
Expand Down
4 changes: 4 additions & 0 deletions examples/movie_view_ratings/run_on_spark.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
""" Demo of PipelineDP with Spark.
For running:
1. Install Python and run on the command line `pip install pipeline-dp pyspark absl-py`
2. Run python run_on_spark.py --input_file=<path to data.txt from step 2> --output_file=<...>
"""

from absl import app
Expand Down
5 changes: 4 additions & 1 deletion examples/restaraunt_visits/run_on_beam.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
""" Demo of PipelineDP with Apache Beam.
For running:
1. Install Python and run on the command line `pip install pipeline-dp apache-beam absl-py`
2. Run python run_on_beam.py --input_file=<path to data.txt> --output_file=<...>
"""

from absl import app
Expand All @@ -21,7 +25,6 @@
import pipeline_dp
from pipeline_dp import private_beam
from pipeline_dp import SumParams
from pipeline_dp.private_beam import MakePrivate
import pandas as pd

FLAGS = flags.FLAGS
Expand Down
6 changes: 5 additions & 1 deletion examples/restaraunt_visits/run_without_frameworks.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,11 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Demo of running PipelineDP locally, without any external data processing framework"""
""" Demo of running PipelineDP locally, without any external data processing framework
1. Install Python and run on the command line `pip install pipeline-dp absl-py`
2. Run python run_without_frameworks.py --input_file=<path to data.txt> --output_file=<...>
"""

from absl import app
from absl import flags
Expand Down
1 change: 0 additions & 1 deletion examples/restaurant_visits.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -140,7 +140,6 @@
"from apache_beam.runners.portability import fn_api_runner\n",
"from apache_beam.runners.interactive import interactive_runner\n",
"from apache_beam.runners.interactive.interactive_beam import *\n",
"import pyspark\n",
"from dataclasses import dataclass\n",
"import pipeline_dp\n",
"\n",
Expand Down

0 comments on commit a69143b

Please sign in to comment.