Skip to content

Commit

Permalink
Merge branch 'release'
Browse files Browse the repository at this point in the history
  • Loading branch information
Axect committed Nov 4, 2022
2 parents a7951e0 + c6adc5d commit 619090b
Show file tree
Hide file tree
Showing 7 changed files with 135 additions and 7 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ true/
*.parquet
example_data/*.csv
example_data/*.nc
example_data/*.parquet
*.mm_profdata

src/bin/*
2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "peroxide"
version = "0.32.0"
version = "0.32.1"
authors = ["axect <axect@outlook.kr>"]
edition = "2018"
description = "Rust comprehensive scientific computation library containing linear algebra, numerical analysis, statistics and machine learning tools with familiar syntax"
Expand Down
6 changes: 6 additions & 0 deletions RELEASES.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,9 @@
# Release 0.32.1 (2022-11-04)

* Add an option for choosing the compression method for Parquet
* At `fuga` : `fn write_parquet(&self, path: &str, compression: CompressionOptions)`
* At `prelude` : `fn write_parquet(&self, path:&str)` (Default: `CompressionOptions::Uncompressed`)

# Release 0.32.0 (2022-11-03)

## DataFrame meets Parquet
Expand Down
47 changes: 46 additions & 1 deletion src/fuga/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
//! extern crate peroxide;
//! use peroxide::fuga::*;
//!
//! // Then you can use everyting in peroxide.
//! // Then you can use everything in peroxide.
//! ```
//!
//! # Compare with `prelude`
Expand Down Expand Up @@ -103,6 +103,48 @@
//! a.solve(&b).print(); // [1, 1]
//! }
//! ```
//!
//! * DataFrame with Parquet
//!
//! ```
//! extern crate peroxide;
//! use peroxide::fuga::*;
//!
//! fn main() {
//! let x = seq(0, 1, 0.1);
//! let y = x.fmap(|t| t.powi(2));
//!
//! let mut df = DataFrame::new(vec![]);
//! df.push("x", Series::new(x));
//! df.push("y", Series::new(y));
//!
//! df.print();
//!
//! # #[cfg(feature="parquet")] {
//! df.write_parquet("example_data/test.parquet", CompressionOptions::Uncompressed).unwrap();
//! # }
//! }
//! ```
//!
//! ```
//! extern crate peroxide;
//! use peroxide::prelude::*;
//!
//! fn main() {
//! let x = seq(0, 1, 0.1);
//! let y = x.fmap(|t| t.powi(2));
//!
//! let mut df = DataFrame::new(vec![]);
//! df.push("x", Series::new(x));
//! df.push("y", Series::new(y));
//!
//! df.print();
//!
//! # #[cfg(feature="parquet")] {
//! df.write_parquet("example_data/test.parquet").unwrap();
//! # }
//! }
//! ```

#[allow(unused_imports)]
pub use crate::macros::{julia_macro::*, matlab_macro::*, r_macro::*};
Expand Down Expand Up @@ -175,3 +217,6 @@ pub use crate::structure::matrix::{
pub use crate::structure::dataframe::DType::*;
pub use crate::structure::ad::AD::*;
pub use crate::numerical::spline::SlopeMethod::{Akima, Quadratic};

#[cfg(feature="parquet")]
pub use arrow2::io::parquet::write::CompressionOptions;
54 changes: 53 additions & 1 deletion src/prelude/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,48 @@
//! a.solve(&b).print(); // [1, 1]
//! }
//! ```
//!
//! * DataFrame with Parquet
//!
//! ```
//! extern crate peroxide;
//! use peroxide::fuga::*;
//!
//! fn main() {
//! let x = seq(0, 1, 0.1);
//! let y = x.fmap(|t| t.powi(2));
//!
//! let mut df = DataFrame::new(vec![]);
//! df.push("x", Series::new(x));
//! df.push("y", Series::new(y));
//!
//! df.print();
//!
//! # #[cfg(feature="parquet")] {
//! df.write_parquet("example_data/test.parquet", CompressionOptions::Uncompressed).unwrap();
//! # }
//! }
//! ```
//!
//! ```
//! extern crate peroxide;
//! use peroxide::prelude::*;
//!
//! fn main() {
//! let x = seq(0, 1, 0.1);
//! let y = x.fmap(|t| t.powi(2));
//!
//! let mut df = DataFrame::new(vec![]);
//! df.push("x", Series::new(x));
//! df.push("y", Series::new(y));
//!
//! df.print();
//!
//! # #[cfg(feature="parquet")] {
//! df.write_parquet("example_data/test.parquet").unwrap();
//! # }
//! }
//! ```

#[allow(unused_imports)]
pub use crate::macros::{julia_macro::*, matlab_macro::*, r_macro::*};
Expand Down Expand Up @@ -129,9 +171,16 @@ pub use crate::structure::{
},
polynomial::{Polynomial,poly,Calculus,lagrange_polynomial,legendre_polynomial},
vector::*,
dataframe::*,
dataframe::{
DataFrame, DType, DTypeArray, DTypeValue, Series, Scalar, TypedScalar, TypedVector
},
//complex::C64,
};
#[cfg(feature="csv")]
pub use crate::structure::dataframe::WithCSV;

#[cfg(feature="nc")]
pub use crate::structure::dataframe::WithNetCDF;

pub use simpler::{solve, SimplerLinearAlgebra};

Expand All @@ -156,3 +205,6 @@ pub use crate::numerical::{
};

pub use simpler::{eigen, integrate, chebyshev_polynomial, cubic_hermite_spline};

#[cfg(feature="parquet")]
pub use simpler::SimpleParquet;
23 changes: 23 additions & 0 deletions src/prelude/simpler.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
use std::error::Error;
use arrow2::io::parquet::write::CompressionOptions;
use crate::numerical::{
eigen,
eigen::{Eigen, EigenMethod::Jacobi},
Expand All @@ -6,9 +8,12 @@ use crate::numerical::{
spline,
spline::{CubicHermiteSpline, SlopeMethod::Quadratic},
};
use crate::prelude::DataFrame;
use crate::structure::matrix::{self, Matrix};
use crate::structure::polynomial;
use crate::traits::math::{Norm, Normed};
#[cfg(feature="parquet")]
use crate::structure::dataframe::WithParquet;

/// Simple Norm
pub trait SimpleNorm: Normed {
Expand Down Expand Up @@ -142,4 +147,22 @@ pub fn chebyshev_polynomial(n: usize) -> polynomial::Polynomial {

/// Build a cubic Hermite spline from node coordinates.
///
/// Convenience wrapper around `spline::cubic_hermite_spline` that fixes the
/// slope-estimation method to `Quadratic`, so callers do not have to pick one.
pub fn cubic_hermite_spline(node_x: &[f64], node_y: &[f64]) -> CubicHermiteSpline {
    let slope_method = Quadratic;
    spline::cubic_hermite_spline(node_x, node_y, slope_method)
}

/// Simplified Parquet I/O.
///
/// Mirrors the `WithParquet` trait but drops the compression parameter from
/// `write_parquet`; an implementation picks the compression method itself
/// (the `DataFrame` impl below uses `CompressionOptions::Uncompressed`).
///
/// # Errors
///
/// Both methods propagate any underlying I/O or (de)serialization failure
/// as a boxed `dyn Error`.
#[cfg(feature="parquet")]
pub trait SimpleParquet: Sized {
    /// Write `self` as a Parquet file at `path`.
    fn write_parquet(&self, path: &str) -> Result<(), Box<dyn Error>>;
    /// Read a value of this type back from the Parquet file at `path`.
    fn read_parquet(path: &str) -> Result<Self, Box<dyn Error>>;
}

/// `SimpleParquet` for `DataFrame`: thin delegation to the full
/// `WithParquet` interface with a fixed compression choice.
#[cfg(feature="parquet")]
impl SimpleParquet for DataFrame {
    /// Write the frame to `path` without compression.
    fn write_parquet(&self, path: &str) -> Result<(), Box<dyn Error>> {
        // The simple API hides the compression knob; uncompressed is the default.
        let compression = CompressionOptions::Uncompressed;
        <Self as WithParquet>::write_parquet(self, path, compression)
    }

    /// Read a frame back from the Parquet file at `path`.
    fn read_parquet(path: &str) -> Result<Self, Box<dyn Error>> {
        <Self as WithParquet>::read_parquet(path)
    }
}
9 changes: 5 additions & 4 deletions src/structure/dataframe.rs
Original file line number Diff line number Diff line change
Expand Up @@ -242,7 +242,7 @@
//! df.push("a", Series::new(vec!['x', 'y', 'z']));
//! df.push("b", Series::new(vec![0, 1, 2]));
//! df.push("c", Series::new(c!(0.1, 0.2, 0.3)));
//! df.write_parquet("example_data/doc_pq.parquet")?;
//! df.write_parquet("example_data/doc_pq.parquet", CompressionOptions::Uncompressed)?;
//!
//! // Read parquet
//! let mut dg = DataFrame::read_parquet("example_data/doc_pq.parquet")?;
Expand Down Expand Up @@ -1832,17 +1832,18 @@ impl WithNetCDF for DataFrame {
}
}

/// Trait for reading and writing the Parquet format
#[cfg(feature="parquet")]
pub trait WithParquet {
fn write_parquet(&self, file_path: &str) -> Result<(), Box<dyn Error>>;
fn write_parquet(&self, file_path: &str, compression: CompressionOptions) -> Result<(), Box<dyn Error>>;
fn read_parquet(file_path: &str) -> Result<Self, Box<dyn Error>> where Self: Sized;
// fn read_parquet_by_header(file_path: &str, header: Vec<&str>) -> Result<Self, Box<dyn Error>> where Self: Sized;
}

#[cfg(feature="parquet")]
impl WithParquet for DataFrame {
/// Write DataFrame to parquet
fn write_parquet(&self, file_path: &str) -> Result<(), Box<dyn Error>> {
fn write_parquet(&self, file_path: &str, compression: CompressionOptions) -> Result<(), Box<dyn Error>> {
let file = std::fs::File::create(file_path)?;

let mut schema_vec = vec![];
Expand All @@ -1864,7 +1865,7 @@ impl WithParquet for DataFrame {
let encodings = (0 .. l).map(|_| vec![Encoding::Plain]).collect::<Vec<_>>();
let options = WriteOptions {
write_statistics: true,
compression: CompressionOptions::Snappy,
compression,
version: Version::V2,
};

Expand Down

0 comments on commit 619090b

Please sign in to comment.