
Commit

Added a code snippet to selectively load the NVIDIA IndeX plugin, and fixed an issue in the script to start the Dask-MPI cluster

Signed-off-by: Nick Leaf <[email protected]>
nleaf-nv committed Sep 17, 2020
1 parent 074d548 commit 3f83418
Showing 2 changed files with 64 additions and 55 deletions.
115 changes: 62 additions & 53 deletions notebooks/Dask-MPI_Volume_Render.ipynb
@@ -110,59 +110,68 @@
"source": [
"# Define a function for remote execution that will set up the ParaView state\n",
"def workerState(self, arr):\n",
" import numpy as np\n",
" import vtk\n",
" from vtk.util import numpy_support as vtknp\n",
" \n",
" #Use the undocumented block slicing to get the block for this rank\n",
" wdims = arr.shape[::-1]\n",
" ar = arr.blocks[self.rank].compute()\n",
" dims = ar.shape[::-1]\n",
" print(\"Rank\", self.rank, \"has array with local/global dims\", dims, wdims)\n",
" ar = np.reshape(ar, dims[0]*dims[1]*dims[2])\n",
" \n",
" ext = [0,dims[0]-1, 0,dims[1]-1, max(self.rank*dims[2]-1,0),(self.rank+1)*dims[2]-1]\n",
" wext = [0,wdims[0]-1, 0,wdims[1]-1, 0,wdims[2]-1]\n",
"\n",
" vtkimg = vtk.vtkImageData()\n",
" vtkimg.Initialize()\n",
" vtkimg.SetExtent(ext)\n",
" vtkimg.SetSpacing([1,1,1])\n",
" \n",
" #set the extent for the whole dataset\n",
" vi = vtk.vtkInformation()\n",
" vtkimg.CopyInformationToPipeline(vi)\n",
" vi.Set(vtk.vtkStreamingDemandDrivenPipeline.WHOLE_EXTENT(), wext[0],wext[1],wext[2],wext[3],wext[4],wext[5])\n",
" vtkimg.CopyInformationFromPipeline(vi)\n",
"\n",
" varnm = 'E' #'E' is entropy for this data\n",
" vtkarr = vtknp.numpy_to_vtk(ar)\n",
" vtkarr.SetName(varnm)\n",
" vtkimg.GetPointData().AddArray(vtkarr)\n",
" vtkimg.GetPointData().SetScalars(vtkarr)\n",
"\n",
" self.TP = self.pvs.TrivialProducer()\n",
" self.TP.GetClientSideObject().SetOutput(vtkimg)\n",
" self.TP.UpdatePipeline()\n",
" \n",
" #initializae some renderer settings\n",
" self.renv.ViewSize = [800, 500]\n",
" self.renv.CameraPosition = [650,0,0]\n",
" self.renv.Background = [0.0, 0.0, 0.0]\n",
"\n",
" #create a display object for the data, and set it to volume render\n",
" self.TPDisplay = self.pvs.Show(self.TP, self.renv)\n",
" ePWF,eLUT = self.pvs.GetOpacityTransferFunction(varnm), self.pvs.GetColorTransferFunction(varnm)\n",
" eLUT.RGBPoints = [3.0241666020214752e-15, 0.0392156862745098, 1.0, 0.9686274509803922, 0.05988497659564321, 0.0392156862745098, 1.0, 0.9686274509803922, 0.06215288117527962, 0.0, 0.0, 0.0, 0.06337877362966537, 0.0, 0.0, 0.0, 0.06871142238378525, 0.901960784314, 0.0, 0.0, 0.0716535672545433, 0.901960784314, 0.901960784314, 0.0, 0.08403510600328445, 0.9882352941176471, 0.9882352941176471, 0.9882352941176471, 0.11376306414604187, 1.0, 1.0, 1.0]\n",
" eLUT.ColorSpace = 'RGB'\n",
" ePWF.Points = [3.0241666020214752e-15, 0.0, 0.5, 0.0, 0.032547514885663986, 0.0, 0.5, 0.0, 0.03309916704893112, 0.3529411852359772, 0.5, 0.0, 0.03346693515777588, 0.0, 0.5, 0.0, 0.06215288117527962, 0.0, 0.5, 0.0, 0.06779199838638306, 0.05882352963089943, 0.8863638639450073, 0.0, 0.07698621600866318, 0.11029411852359772, 0.5, 0.0, 0.08078648895025253, 0.04411764815449715, 0.5, 0.0, 0.08244144916534424, 0.4852941334247589, 0.5, 0.0, 0.08378992974758148, 0.0, 0.5, 0.0, 0.08746761322713148, 0.0, 0.5, 0.0, 0.09617146849632263, 0.0, 0.5, 0.0, 0.10965631902217865, 0.4117647111415863, 0.5, 0.0, 0.11376306414604187, 1.0, 0.5, 0.0]\n",
"\n",
" # trace defaults for the display properties.\n",
" import numpy as np\n",
" import vtk\n",
" from vtk.util import numpy_support as vtknp\n",
"\n",
" useIndex = True\n",
"\n",
" if useIndex:\n",
" self.pvs.LoadPlugin('/usr/local/paraview/lib/paraview-5.8/plugins/pvNVIDIAIndeX/pvNVIDIAIndeX.so', remote=False, ns=globals())\n",
"\n",
" #Use the undocumented block slicing to get the block for this rank\n",
" wdims = arr.shape[::-1]\n",
" ar = arr.blocks[self.rank].compute()\n",
" dims = ar.shape[::-1]\n",
" print(\"Rank\", self.rank, \"has array with local/global dims\", dims, wdims)\n",
" ar = np.reshape(ar, dims[0]*dims[1]*dims[2])\n",
"\n",
" ext = [0,dims[0]-1, 0,dims[1]-1, max(self.rank*dims[2]-1,0),(self.rank+1)*dims[2]-1]\n",
" wext = [0,wdims[0]-1, 0,wdims[1]-1, 0,wdims[2]-1]\n",
"\n",
" vtkimg = vtk.vtkImageData()\n",
" vtkimg.Initialize()\n",
" vtkimg.SetExtent(ext)\n",
" vtkimg.SetSpacing([1,1,1])\n",
"\n",
" #set the extent for the whole dataset\n",
" vi = vtk.vtkInformation()\n",
" vtkimg.CopyInformationToPipeline(vi)\n",
" vi.Set(vtk.vtkStreamingDemandDrivenPipeline.WHOLE_EXTENT(), wext[0],wext[1],wext[2],wext[3],wext[4],wext[5])\n",
" vtkimg.CopyInformationFromPipeline(vi)\n",
"\n",
" varnm = 'E' #'E' is entropy for this data\n",
" vtkarr = vtknp.numpy_to_vtk(ar)\n",
" vtkarr.SetName(varnm)\n",
" vtkimg.GetPointData().AddArray(vtkarr)\n",
" vtkimg.GetPointData().SetScalars(vtkarr)\n",
"\n",
" self.TP = self.pvs.TrivialProducer()\n",
" self.TP.GetClientSideObject().SetOutput(vtkimg)\n",
" self.TP.UpdatePipeline()\n",
"\n",
" #initializae some renderer settings\n",
" self.renv.ViewSize = [800, 500]\n",
" self.renv.CameraPosition = [650,0,0]\n",
" self.renv.Background = [0.0, 0.0, 0.0]\n",
"\n",
" #create a display object for the data, and set it to volume render\n",
" self.TPDisplay = self.pvs.Show(self.TP, self.renv)\n",
" ePWF,eLUT = self.pvs.GetOpacityTransferFunction(varnm), self.pvs.GetColorTransferFunction(varnm)\n",
" eLUT.RGBPoints = [3.0241666020214752e-15, 0.0392156862745098, 1.0, 0.9686274509803922, 0.05988497659564321, 0.0392156862745098, 1.0, 0.9686274509803922, 0.06215288117527962, 0.0, 0.0, 0.0, 0.06337877362966537, 0.0, 0.0, 0.0, 0.06871142238378525, 0.901960784314, 0.0, 0.0, 0.0716535672545433, 0.901960784314, 0.901960784314, 0.0, 0.08403510600328445, 0.9882352941176471, 0.9882352941176471, 0.9882352941176471, 0.11376306414604187, 1.0, 1.0, 1.0]\n",
" eLUT.ColorSpace = 'RGB'\n",
" ePWF.Points = [3.0241666020214752e-15, 0.0, 0.5, 0.0, 0.032547514885663986, 0.0, 0.5, 0.0, 0.03309916704893112, 0.3529411852359772, 0.5, 0.0, 0.03346693515777588, 0.0, 0.5, 0.0, 0.06215288117527962, 0.0, 0.5, 0.0, 0.06779199838638306, 0.05882352963089943, 0.8863638639450073, 0.0, 0.07698621600866318, 0.11029411852359772, 0.5, 0.0, 0.08078648895025253, 0.04411764815449715, 0.5, 0.0, 0.08244144916534424, 0.4852941334247589, 0.5, 0.0, 0.08378992974758148, 0.0, 0.5, 0.0, 0.08746761322713148, 0.0, 0.5, 0.0, 0.09617146849632263, 0.0, 0.5, 0.0, 0.10965631902217865, 0.4117647111415863, 0.5, 0.0, 0.11376306414604187, 1.0, 0.5, 0.0]\n",
"\n",
" # trace defaults for the display properties.\n",
" if useIndex:\n",
" self.TPDisplay.Representation = 'NVIDIA IndeX'\n",
" else:\n",
" self.TPDisplay.Representation = 'Volume'\n",
" self.TPDisplay.ColorArrayName = ['POINTS', varnm]\n",
" self.TPDisplay.LookupTable = self.pvs.GetColorTransferFunction(varnm)\n",
" self.TPDisplay.OpacityArray = ['POINTS', varnm]\n",
" self.TPDisplay.ScalarOpacityFunction = self.pvs.GetOpacityTransferFunction(varnm)\n",
"\n",
" self.TPDisplay.ColorArrayName = ['POINTS', varnm]\n",
" self.TPDisplay.LookupTable = self.pvs.GetColorTransferFunction(varnm)\n",
" self.TPDisplay.OpacityArray = ['POINTS', varnm]\n",
" self.TPDisplay.ScalarOpacityFunction = self.pvs.GetOpacityTransferFunction(varnm)\n",
"\n",
"\n",
"# Submit the setup function for execution on Dask workers\n",
@@ -204,7 +213,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.4"
"version": "3.7.6"
}
},
"nbformat": 4,
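For reference, the selective plugin loading added above can be distilled into a small standalone pattern. The following is a minimal sketch rather than the notebook's literal code: it assumes paraview.simple is importable in the server-side process and that the plugin lives at the ParaView 5.8 path used in the notebook, and it falls back to the stock 'Volume' representation when the IndeX library is not present.

import os
import paraview.simple as pvs

# Path assumed from the notebook above; adjust for other ParaView builds.
INDEX_PLUGIN = '/usr/local/paraview/lib/paraview-5.8/plugins/pvNVIDIAIndeX/pvNVIDIAIndeX.so'

def pick_representation():
    # Load the NVIDIA IndeX plugin if its shared library exists on disk,
    # otherwise fall back to ParaView's built-in volume representation.
    if os.path.exists(INDEX_PLUGIN):
        pvs.LoadPlugin(INDEX_PLUGIN, remote=False, ns=globals())
        return 'NVIDIA IndeX'
    return 'Volume'

# Usage, mirroring the worker setup in the notebook:
#   display = pvs.Show(source, view)
#   display.Representation = pick_representation()

Probing for the .so on disk, instead of a hard-coded useIndex flag, is one way to keep the same notebook working on nodes where the IndeX plugin is not installed.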
4 changes: 2 additions & 2 deletions scripts/runCluster.sh
@@ -11,7 +11,7 @@ fi
mkdir -p dask-worker-space

CLEAN_CMD='rm -rf /tmp/scheduler.json *.lock dask-worker-space/*' #cleans up leftover lock files from last run
SCHED_CMD='mpiexec -n 1 dask-mpi --scheduler-file /tmp/scheduler.json --no-nanny --local-directory dask-worker-space' #executes one scheduler process
WORK_CMD="mpiexec -n $N dask-mpi --scheduler-file /tmp/scheduler.json --no-nanny --local-directory dask-worker-space --no-scheduler" #executes $N workers, connects them to scheduler
SCHED_CMD='mpiexec --allow-run-as-root -n 1 dask-mpi --scheduler-file /tmp/scheduler.json --no-nanny --local-directory dask-worker-space' #executes one scheduler process
WORK_CMD="mpiexec --allow-run-as-root -n $N dask-mpi --scheduler-file /tmp/scheduler.json --no-nanny --local-directory dask-worker-space --no-scheduler" #executes $N workers, connects them to scheduler

$CLEAN_CMD ; $SCHED_CMD & $WORK_CMD ; fg
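Two notes on the script change: the added --allow-run-as-root flags are an Open MPI option that lets mpiexec run under the root user, as is common inside containers; and once the scheduler and workers are up, a notebook attaches to them through the shared scheduler file. A minimal sketch of that client side, assuming dask.distributed is installed and the default /tmp/scheduler.json path from the script:

from dask.distributed import Client

# Connect to the Dask-MPI cluster started by runCluster.sh.
# Assumes the scheduler file path matches the one used in the script.
client = Client(scheduler_file='/tmp/scheduler.json')

# Quick sanity check: report the scheduler address and how many worker ranks joined.
print(client)
print(len(client.scheduler_info()['workers']), 'workers connected')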
