1 change: 1 addition & 0 deletions native/Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions native/Cargo.toml
@@ -39,6 +39,7 @@ async-trait = { version = "0.1" }
bytes = { version = "1.10.0" }
parquet = { version = "57.2.0", default-features = false, features = ["experimental"] }
datafusion = { version = "51.0.0", default-features = false, features = ["unicode_expressions", "crypto_expressions", "nested_expressions", "parquet"] }
datafusion-common = { version = "51.0.0"}
Contributor: this too?

datafusion-datasource = { version = "51.0.0" }
datafusion-spark = { version = "51.0.0" }
datafusion-comet-spark-expr = { path = "spark-expr" }
1 change: 1 addition & 0 deletions native/spark-expr/Cargo.toml
@@ -30,6 +30,7 @@ edition = { workspace = true }
arrow = { workspace = true }
chrono = { workspace = true }
datafusion = { workspace = true }
datafusion-common = { workspace = true }
chrono-tz = { workspace = true }
Contributor: @rafafrdz, this might not be an intended change?

num = { workspace = true }
regex = { workspace = true }
5 changes: 5 additions & 0 deletions native/spark-expr/src/comet_scalar_funcs.rs
@@ -15,6 +15,7 @@
// specific language governing permissions and limitations
// under the License.

use crate::datetime_funcs::to_timestamp;
use crate::hash_funcs::*;
use crate::math_funcs::abs::abs;
use crate::math_funcs::checked_arithmetic::{checked_add, checked_div, checked_mul, checked_sub};
@@ -177,6 +178,10 @@ pub fn create_comet_physical_fun_with_eval_mode(
            let func = Arc::new(spark_modulo);
            make_comet_scalar_udf!("spark_modulo", func, without data_type, fail_on_error)
        }
        "to_timestamp" => {
            let func = Arc::new(to_timestamp);
            make_comet_scalar_udf!("to_timestamp", func, without data_type)
        }
        "abs" => {
            let func = Arc::new(abs);
            make_comet_scalar_udf!("abs", func, without data_type)
2 changes: 2 additions & 0 deletions native/spark-expr/src/datetime_funcs/mod.rs
@@ -19,6 +19,7 @@ mod date_diff;
mod date_trunc;
mod extract_date_part;
mod timestamp_trunc;
mod to_timestamp;
mod unix_timestamp;

pub use date_diff::SparkDateDiff;
@@ -27,4 +28,5 @@ pub use extract_date_part::SparkHour;
pub use extract_date_part::SparkMinute;
pub use extract_date_part::SparkSecond;
pub use timestamp_trunc::TimestampTruncExpr;
pub use to_timestamp::to_timestamp;
pub use unix_timestamp::SparkUnixTimestamp;
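
Note: the diff registers "to_timestamp" in create_comet_physical_fun_with_eval_mode and declares/re-exports the new to_timestamp module, but the body of native/spark-expr/src/datetime_funcs/to_timestamp.rs itself is not shown in this change set. A minimal sketch of the shape such a helper could take, assuming it uses the plain ColumnarValue-slice signature that the other "without data_type" arms wrap; the cast-only body below is a hypothetical placeholder, not the PR's actual Spark-compatible parsing logic:

// Hypothetical sketch only: the real to_timestamp.rs is not part of this diff.
// Assumes the usual DataFusion scalar signature wrapped by
// make_comet_scalar_udf!(..., without data_type), and uses the newly added
// datafusion-common dependency for Result/DataFusionError.
use arrow::datatypes::{DataType, TimeUnit};
use datafusion::logical_expr::ColumnarValue;
use datafusion_common::{DataFusionError, Result};

pub fn to_timestamp(args: &[ColumnarValue]) -> Result<ColumnarValue> {
    // Placeholder behavior: take the input column and cast it to a
    // microsecond timestamp; a real implementation would also honor
    // Spark's optional format argument.
    let [input, _format] = args else {
        return Err(DataFusionError::Internal(
            "to_timestamp expects exactly two arguments".to_string(),
        ));
    };
    input.cast_to(&DataType::Timestamp(TimeUnit::Microsecond, None), None)
}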