diff --git a/python/distrdf/common/check_rungraphs.py b/python/distrdf/common/check_rungraphs.py
index 4fb9009e72..b6d3c85f0b 100644
--- a/python/distrdf/common/check_rungraphs.py
+++ b/python/distrdf/common/check_rungraphs.py
@@ -4,7 +4,6 @@
 import ROOT
 
-import DistRDF
 from DistRDF.Backends import Spark
 from DistRDF.Backends import Dask
 
@@ -46,7 +45,7 @@ def test_rungraphs_sparkanddask_3histos(self, connection):
         for proxy in histoproxies:
             assert proxy.proxied_node.value is None
 
-        DistRDF.RunGraphs(histoproxies)
+        ROOT.RDF.RunGraphs(histoproxies)
 
         # After RunGraphs all histograms are correctly assigned to the
         # node objects
diff --git a/python/distrdf/dask/check_rungraphs.py b/python/distrdf/dask/check_rungraphs.py
index 5fe348992d..82ffe232b8 100644
--- a/python/distrdf/dask/check_rungraphs.py
+++ b/python/distrdf/dask/check_rungraphs.py
@@ -4,7 +4,6 @@
 import ROOT
 
-import DistRDF
 from DistRDF.Backends import Dask
 
@@ -36,7 +35,7 @@ def test_rungraphs_dask_3histos(self, connection):
         for proxy in histoproxies:
             assert proxy.proxied_node.value is None
 
-        DistRDF.RunGraphs(histoproxies)
+        ROOT.RDF.RunGraphs(histoproxies)
 
         # After RunGraphs all histograms are correctly assigned to the
         # node objects
diff --git a/python/distrdf/dask/check_variations.py b/python/distrdf/dask/check_variations.py
index 70743d27f3..90e7104fd6 100644
--- a/python/distrdf/dask/check_variations.py
+++ b/python/distrdf/dask/check_variations.py
@@ -2,18 +2,17 @@
 import ROOT
 
-import DistRDF
 from DistRDF.Backends import Dask
 
+VariationsFor = ROOT.RDF.Experimental.VariationsFor
 
 class TestVariations:
     """Tests usage of systematic variations with Dask backend"""
-
     def test_histo(self, connection):
         df = Dask.RDataFrame(10, daskclient=connection, npartitions=2).Define("x", "1")
         df1 = df.Vary("x", "ROOT::RVecI{-2,2}", ["down", "up"])
         h = df1.Histo1D(("name", "title", 10, -10, 10), "x")
-        histos = DistRDF.VariationsFor(h)
+        histos = VariationsFor(h)
 
         expectednames = ["nominal", "x:up", "x:down"]
         expectedmeans = [1, 2, -2]
@@ -26,7 +25,7 @@ def test_histo(self, connection):
         df = Dask.RDataFrame(10, daskclient=connection, npartitions=2).Define("x", "1")
         g = df.Vary("x", "ROOT::RVecI{-1, 2}", nVariations=2).Graph("x", "x")
-        gs = DistRDF.VariationsFor(g)
+        gs = VariationsFor(g)
 
         assert g.GetMean() == 1
 
@@ -41,7 +40,7 @@ def test_mixed(self, connection):
         df = Dask.RDataFrame(10, daskclient=connection, npartitions=2).Define("x", "1").Define("y", "42")
         h = df.Vary("x", "ROOT::RVecI{-1, 2}", variationTags=["down", "up"]).Histo1D(("name", "title", 10, -500, 500), "x", "y")
-        histos = DistRDF.VariationsFor(h)
+        histos = VariationsFor(h)
 
         expectednames = ["nominal", "x:down", "x:up"]
         expectedmeans = [1, -1, 2]
@@ -57,7 +56,7 @@ def test_simultaneous(self, connection):
         h = df.Vary(["x", "y"], "ROOT::RVec{{-1, 2, 3}, {41, 43, 44}}", ["down", "up", "other"], "xy").Histo1D(("name", "title", 10, -500, 500), "x", "y")
-        histos = DistRDF.VariationsFor(h)
+        histos = VariationsFor(h)
 
         expectednames = ["nominal", "xy:down", "xy:up", "xy:other"]
         expectedmeans = [1, -1, 2, 3]
@@ -74,7 +73,7 @@ def test_varyfiltersum(self, connection):
         assert df_sum.GetValue() == 10
 
-        sums = DistRDF.VariationsFor(df_sum)
+        sums = VariationsFor(df_sum)
 
         expectednames = ["nominal", "myvariation:down", "myvariation:up"]
         expectedsums = [10, 0, 20]
diff --git a/python/distrdf/spark/check_rungraphs.py b/python/distrdf/spark/check_rungraphs.py
index b187084e16..5ff6c49d6b 100644
--- a/python/distrdf/spark/check_rungraphs.py
+++ b/python/distrdf/spark/check_rungraphs.py
@@ -4,7 +4,6 @@
 import ROOT
 
-import DistRDF
 from DistRDF.Backends import Spark
 
@@ -36,7 +35,7 @@ def test_rungraphs_spark_3histos(self, connection):
         for proxy in histoproxies:
             assert proxy.proxied_node.value is None
 
-        DistRDF.RunGraphs(histoproxies)
+        ROOT.RDF.RunGraphs(histoproxies)
 
         # After RunGraphs all histograms are correctly assigned to the
         # node objects
diff --git a/python/distrdf/spark/check_variations.py b/python/distrdf/spark/check_variations.py
index 0fda8c619d..eeed49db33 100644
--- a/python/distrdf/spark/check_variations.py
+++ b/python/distrdf/spark/check_variations.py
@@ -2,18 +2,18 @@
 import ROOT
 
-import DistRDF
 from DistRDF.Backends import Spark
 
+VariationsFor = ROOT.RDF.Experimental.VariationsFor
 
 class TestVariations:
     """Tests usage of systematic variations with Spark backend"""
 
     def test_histo(self, connection):
         df = Spark.RDataFrame(10, sparkcontext=connection, npartitions=2).Define("x", "1")
         df1 = df.Vary("x", "ROOT::RVecI{-2,2}", ["down", "up"])
         h = df1.Histo1D(("name", "title", 10, -10, 10), "x")
-        histos = DistRDF.VariationsFor(h)
+        histos = VariationsFor(h)
 
         expectednames = ["nominal", "x:up", "x:down"]
         expectedmeans = [1, 2, -2]
@@ -26,7 +26,7 @@ def test_histo(self, connection):
         df = Spark.RDataFrame(10, sparkcontext=connection, npartitions=2).Define("x", "1")
         g = df.Vary("x", "ROOT::RVecI{-1, 2}", nVariations=2).Graph("x", "x")
-        gs = DistRDF.VariationsFor(g)
+        gs = VariationsFor(g)
 
         assert g.GetMean() == 1
 
@@ -41,7 +41,7 @@ def test_mixed(self, connection):
         df = Spark.RDataFrame(10, sparkcontext=connection, npartitions=2).Define("x", "1").Define("y", "42")
         h = df.Vary("x", "ROOT::RVecI{-1, 2}", variationTags=["down", "up"]).Histo1D(("name", "title", 10, -500, 500), "x", "y")
-        histos = DistRDF.VariationsFor(h)
+        histos = VariationsFor(h)
 
         expectednames = ["nominal", "x:down", "x:up"]
         expectedmeans = [1, -1, 2]
@@ -57,7 +57,7 @@ def test_simultaneous(self, connection):
         h = df.Vary(["x", "y"], "ROOT::RVec{{-1, 2, 3}, {41, 43, 44}}", ["down", "up", "other"], "xy").Histo1D(("name", "title", 10, -500, 500), "x", "y")
-        histos = DistRDF.VariationsFor(h)
+        histos = VariationsFor(h)
 
         expectednames = ["nominal", "xy:down", "xy:up", "xy:other"]
         expectedmeans = [1, -1, 2, 3]
@@ -74,7 +74,7 @@ def test_varyfiltersum(self, connection):
         assert df_sum.GetValue() == 10
 
-        sums = DistRDF.VariationsFor(df_sum)
+        sums = VariationsFor(df_sum)
 
         expectednames = ["nominal", "myvariation:down", "myvariation:up"]
         expectedsums = [10, 0, 20]
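
For reference, a minimal local (non-distributed) sketch of the two public entry points this patch switches the tests to, ROOT.RDF.RunGraphs and ROOT.RDF.Experimental.VariationsFor. It assumes a ROOT build (6.26 or newer) where the experimental Vary/VariationsFor API is available; the snippet is illustrative and not part of the patch.

import ROOT

# Varied histogram on a plain local RDataFrame; the tests above exercise the
# same calls through the Spark/Dask backends.
df = ROOT.RDataFrame(10).Define("x", "1")
h = df.Vary("x", "ROOT::RVecI{-2, 2}", ["down", "up"]).Histo1D(("name", "title", 10, -10, 10), "x")

# VariationsFor returns the map of varied results, keyed "nominal", "x:down", "x:up".
histos = ROOT.RDF.Experimental.VariationsFor(h)
print(histos["nominal"].GetMean(), histos["x:down"].GetMean(), histos["x:up"].GetMean())

# RunGraphs triggers several independent computation graphs concurrently.
h1 = ROOT.RDataFrame(10).Define("y", "2").Histo1D("y")
h2 = ROOT.RDataFrame(10).Define("z", "3").Histo1D("z")
ROOT.RDF.RunGraphs([h1, h2])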