Commit c4973f48 in pyhst2
Authored by Alessandro Mirone on Feb 07, 2020; committed by Alessandro Mirone on Feb 08, 2020.
2020a

Accelerated jp2edf. The Pool-based version hangs and still needs to be fixed; this new attempt works OK.
Parent: f143b654
Changes: 4 changed files, with 51 additions and 69 deletions (+51 −69).

PyHST/Parameters_module.py   +2   −1
PyHST/PyHST.py               +14  −11
PyHST/pyhst_jp2edf.py        +34  −56
setup.py                     +1   −1
PyHST/Parameters_module.py

@@ -78,6 +78,7 @@ if(sys.argv[0][-12:]!="sphinx-build"):
    mygpus, MemPerProc, coresperproc, cpusperproc = setCpuSet.setCpuSet()
    ncpus = coresperproc
else:
    ncpus = 1
    mygpus, MemPerProc, coresperproc, cpusperproc = [], 20, 1, 1

def genReduce(token, tipo=MPI.INT, ope=MPI.MIN):

@@ -2077,7 +2078,7 @@ Units are pixels per projection.
P = Parameters
P.ncpus = ncpus
if len(P.CURRENT_NAME):
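For context, the hunks above make the detected core count available downstream as P.ncpus. A minimal sketch of that pattern follows; the Parameters stand-in and the fallback are illustrative only, and setCpuSet.setCpuSet() is assumed to return (mygpus, MemPerProc, coresperproc, cpusperproc) as shown in the hunk.

    # Illustrative sketch: attach a detected core count to a shared Parameters
    # object, mirroring the change above. Not the full PyHST module.
    try:
        import setCpuSet                      # helper assumed from the diff
        mygpus, MemPerProc, coresperproc, cpusperproc = setCpuSet.setCpuSet()
        ncpus = coresperproc
    except ImportError:
        # fallback defaults, mirroring the else branch of the hunk
        mygpus, MemPerProc, coresperproc, cpusperproc = [], 20, 1, 1
        ncpus = 1

    class Parameters:
        # stand-in for PyHST's Parameters container (hypothetical here)
        pass

    P = Parameters
    P.ncpus = ncpus   # later forwarded to the jp2edf launcher in PyHST.py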
PyHST/PyHST.py

@@ -486,7 +486,7 @@ def callpyhst(inputfile):
        os.system("for nvar in `awk 'BEGIN{for(v in ENVIRON) print v}' | grep MPI` ; do unset $nvar ; done ; " + comando + " " + gpus_string)
    else:
        args = [nprocs, "pyhst_2_" + myversion] + [inputfile] + sys.argv[2:]
-       seguito = " %s " * len(sys.argv[1:])
+       seguito = " %s " * (len(args) - 2)
        comando = ("mpirun -n %d %s " + seguito) % tuple(args)
        os.system(comando + " " + gpus_string)

@@ -512,7 +512,7 @@ if(sys.argv[0][-12:]!="sphinx-build"):
    msg, errors = p1.communicate()
    if " 2." in msg or " 3." in msg:
        mpi_extra = " --bind-to none --map-by slot "
    if P.IF_EXCLUDE is not None:
        mpi_extra += " -mca btl_tcp_if_exclude %s " % P.IF_EXCLUDE

@@ -525,17 +525,20 @@ if(sys.argv[0][-12:]!="sphinx-build"):
    if P.JP2EDF_DIR is not None:
        if SCHEDULER_TYPE != "SLURM":
-           args = [mpi_extra, rshagent, machinefile_name, nprocs * 4, "pyhst_jp2edf2_" + myversion] + [inputfile] + ["jp2edf"] + sys.argv[2:]
+           args = [mpi_extra, rshagent, machinefile_name, nprocs, "pyhst_jp2edf2_" + myversion] + [inputfile] + ["jp2edf"] + [P.ncpus] + sys.argv[2:]
            seguito = " %s " * (len(args) - 5)
            comando = ("mpirun %s --mca mpi_warn_on_fork 0 --mca orte_rsh_agent %s -machinefile %s -n %d %s " + seguito) % tuple(args)
            if P.VERBOSITY > 1:
                print(comando + " " + gpus_string)
            os.system("for nvar in `awk 'BEGIN{for(v in ENVIRON) print v}' | grep MPI` ; do unset $nvar ; done ; " + comando + " " + gpus_string)
        else:
-           args = [nprocs * 4, "pyhst_jp2edf2_" + myversion] + [inputfile] + ["jp2edf"] + sys.argv[2:]
-           seguito = " %s " * len(sys.argv[1:])
+           npp = 4
+           if SCHEDULER_TYPE == "SLURM":
+               npp = 1
+           args = [nprocs * npp, "pyhst_jp2edf2_" + myversion] + [inputfile] + ["jp2edf"] + [P.ncpus] + sys.argv[2:]
+           seguito = " %s " * (len(args) - 2)
            comando = ("mpirun -n %d %s " + seguito) % tuple(args)
            if P.VERBOSITY > 1:
                print(comando + " " + gpus_string)
-           os.system( + comando + " " + gpus_string)
+           os.system(comando + " " + gpus_string)
        jpfilename = os.path.join(os.path.dirname(inputfile), "jp2edf_" + os.path.basename(inputfile)) + ".par"

@@ -638,7 +641,7 @@ if(sys.argv[0][-12:]!="sphinx-build"):
        os.system("for nvar in `awk 'BEGIN{for(v in ENVIRON) print v}' | grep MPI` ; do unset $nvar ; done ; " + comando + " " + gpus_string)
    else:
        args = [nprocs, "pyhst_segment2_" + myversion] + ["segmenta", pagbone_outputfile, threshold, medianR, maskbone_outputfile, dilatation] + sys.argv[2:]
-       seguito = " %s " * len(sys.argv[1:])
+       seguito = " %s " * (len(args) - 2)
        comando = ("mpirun -n %d %s " + seguito) % tuple(args)
        if P.VERBOSITY > 1:
            print(comando + " " + gpus_string)
        os.system(comando + " " + gpus_string)

@@ -680,7 +683,7 @@ if(sys.argv[0][-12:]!="sphinx-build"):
        os.system("for nvar in `awk 'BEGIN{for(v in ENVIRON) print v}' | grep MPI` ; do unset $nvar ; done ; " + comando + " " + gpus_string)
    else:
        args = [nprocs, "pyhst_segment2_" + myversion] + ["spilla", pagbone_outputfile, absbone_outputfile, maskbone_outputfile, remplacement, corrbone_outputfile] + sys.argv[2:]
-       seguito = " %s " * len(sys.argv[1:])
+       seguito = " %s " * (len(args) - 2)
        comando = ("mpirun -n %d %s " + seguito) % tuple(args)
        if P.VERBOSITY > 2:
            print(comando + " " + gpus_string)
        os.system(comando + " " + gpus_string)

@@ -760,7 +763,7 @@ if(sys.argv[0][-12:]!="sphinx-build"):
        os.system("for nvar in `awk 'BEGIN{for(v in ENVIRON) print v}' | grep MPI` ; do unset $nvar ; done ; " + comando + " " + gpus_string)
    else:
        args = [nprocs, "pyhst_segment2_" + myversion] + ["incolla", pagbone_outputfile, maskbone_outputfile, correctedvol_outputfile, P.OUTPUT_FILE] + sys.argv[2:]
-       seguito = " %s " * len(sys.argv[1:])
+       seguito = " %s " * (len(args) - 2)
        comando = ("mpirun -n %d %s " + seguito) % tuple(args)
        if P.VERBOSITY > 2:
            print(comando + " " + gpus_string)
        os.system(comando + " " + gpus_string)

@@ -787,7 +790,7 @@ if(sys.argv[0][-12:]!="sphinx-build"):
        os.system("for nvar in `awk 'BEGIN{for(v in ENVIRON) print v}' | grep MPI` ; do unset $nvar ; done ; " + comando + " " + gpus_string)
    else:
        args = [nprocs, "pyhst_jp2edf2_" + myversion] + [inputfile] + ["removejp2"] + sys.argv[2:]
-       seguito = " %s " * len(sys.argv[1:])
+       seguito = " %s " * (len(args) - 2)
        comando = ("mpirun -n %d %s " + seguito) % tuple(args)
        if P.VERBOSITY > 1:
            print(comando + " " + gpus_string)
        os.system(comando + " " + gpus_string)
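A note on the pattern repeated in the hunks above: each launcher builds an mpirun command by appending one " %s " slot per remaining argument to a fixed template and then formatting the whole string with tuple(args); the commit switches the slot count from len(sys.argv[1:]) to len(args) - 2, so the placeholder count always matches the list actually being formatted. A small self-contained sketch of that construction, with placeholder values standing in for the real PyHST arguments:

    # Sketch of the command-string construction; the values below are illustrative.
    nprocs = 4
    myversion = "2020a"
    inputfile = "example.par"
    extra_cli = ["--option", "1"]            # stands in for sys.argv[2:]

    args = [nprocs, "pyhst_2_" + myversion] + [inputfile] + extra_cli

    # One " %s " slot for every element after the first two: nprocs fills %d,
    # the executable name fills the leading %s.
    seguito = " %s " * (len(args) - 2)
    comando = ("mpirun -n %d %s " + seguito) % tuple(args)

    print(comando)   # roughly: mpirun -n 4 pyhst_2_2020a  example.par  --option  1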
PyHST/pyhst_jp2edf.py

@@ -6,7 +6,6 @@ from __future__ import print_function
from __future__ import division
-import string
import traceback
import sys

@@ -24,15 +23,15 @@ FILA=[0]
import mpi4py.MPI as MPI
-import setCpuSet
+from . import setCpuSet
import glymur
import threading
-import EdfFile
+from . import EdfFile
-import Parameters_module
+from . import Parameters_module
P = Parameters_module.Parameters

GLobalLock = threading.Lock()

@@ -57,20 +56,32 @@ class ProcessingObject:
        self.source = source
        self.target = target

    def process(self):
-       print(" converto ", self.source, self.target)
-       a = glymur.Jp2k(self.source)
-       print(" conversione OK ", self.target)
-       # data=a[:]
-       print(" scrivo su ", self.target)
-       # f = EdfFile.EdfFile (self.target,"wb")
-       # f.WriteImage({},data)
-       # f=None
+       # print( " PROCESS ", EdfFile)
+       a = glymur.Jp2k(self.source)
+       # print( " conversione OK ", self.target)
+       data = a[:]
+       data = data.T
+       print(self.target)
+       f = EdfFile.EdfFile(self.target, "wb")
+       f.WriteImage({}, data)
+       f = None
+       # print( " waiting " )
+       with WriteLock:
+           print(" aa ", FILA[0])
+           FILA[0] -= 1

+# def processa( i) :
+#     a = glymur.Jp2k( edf_sources[i] )
+#     # print( " conversione OK ", self.target)
+#     data = a[:]
+#     data = data.T
+#     print( edf_targets[i])
+#     f = EdfFile.EdfFile(edf_targets[i], "wb")
+#     f.WriteImage({}, data)
+#     f = None

class threadLauncher(threading.Thread):
    def __init__(self, processingObject=None):

@@ -80,7 +91,7 @@ class threadLauncher(threading.Thread):
    def run(self):
        self.processingObject.process()

-def jp2edf():
+def jp2edf(ncpus):
    myrank = MPI.COMM_WORLD.Get_rank()
    nprocs = MPI.COMM_WORLD.Get_size()

@@ -98,58 +109,24 @@ def jp2edf() :
    maxnargs = 4
    # ncpus, mygpus, MemPerProc=setCpuSet.setCpuSet(maxnargs)
-   ncpus = 10
    nsources = len(edf_sources)
    if (nsources % nprocs):
        lentok = nsources // nprocs + 1
    else:
        lentok = nsources // nprocs
    # edf_targets = edf_targets[myrank *lentok: (myrank+1) *lentok ]
    # edf_sources = edf_sources[myrank *lentok: (myrank+1) *lentok ]
-   N = len(edf_sources)
    FILA[0] = 0
    i = 0
    while (1):
        with WriteLock:
            if FILA[0] < ncpus:
                ido = i
                FILA[0] += 1
-               # pobject = ProcessingObject( edf_sources[i], edf_targets[i] )
-               # pobject.process()
-               # print( " converto " , edf_sources[i])
-               a = glymur.Jp2k(edf_sources[i])
-               # print( " conversione OK ", self.target)
-               data = a[:]
-               data = data.T
-               FILA[0] -= 1
-               print(edf_targets[i])
-               f = EdfFile.EdfFile(edf_targets[i], "wb")
-               f.WriteImage({}, data)
-               f = None
-               # launcher = threadLauncher(pobject)
-               # launcher.start()
+               pobject = ProcessingObject(edf_sources[ido], edf_targets[ido])
+               t = threadLauncher(pobject)
+               # print( " START ", EdfFile)
+               t.start()
                i += 1
        # time.sleep(0.01)
-       if i == N:
+       if i == nsources:
            break

@@ -172,6 +149,7 @@ def removejp2() :
if sys.argv[2] == "jp2edf":
-   jp2edf()
+   ncpus = int(sys.argv[3])
+   jp2edf(ncpus)
else:
    removejp2()
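The rewritten jp2edf() drops the earlier in-line conversion (and the Pool-based attempt mentioned in the commit message, which hangs) in favour of a hand-rolled throttle: a shared counter FILA[0], guarded by WriteLock, is incremented when a conversion thread is launched and decremented by the thread when it finishes, and new threads are started only while the counter stays below ncpus. The sketch below reproduces that pattern with a dummy convert() in place of the glymur/EdfFile work so it runs stand-alone; names follow the diff where possible, the rest is illustrative.

    import threading
    import time

    WriteLock = threading.Lock()
    FILA = [0]                          # shared count of in-flight conversions

    def convert(source, target):
        # placeholder for the real glymur.Jp2k -> EdfFile conversion
        time.sleep(0.1)

    class ProcessingObject:
        def __init__(self, source, target):
            self.source = source
            self.target = target

        def process(self):
            convert(self.source, self.target)
            with WriteLock:
                FILA[0] -= 1            # release one slot when done

    class threadLauncher(threading.Thread):
        def __init__(self, processingObject=None):
            threading.Thread.__init__(self)
            self.processingObject = processingObject

        def run(self):
            self.processingObject.process()

    def jp2edf(ncpus, sources, targets):
        nsources = len(sources)
        FILA[0] = 0
        i = 0
        while True:
            with WriteLock:
                if FILA[0] < ncpus:     # keep at most ncpus conversions running
                    FILA[0] += 1
                    t = threadLauncher(ProcessingObject(sources[i], targets[i]))
                    t.start()
                    i += 1
            if i == nsources:
                break

    jp2edf(2, ["a.jp2", "b.jp2", "c.jp2"], ["a.edf", "b.edf", "c.edf"])

As in the diff, the launcher loop busy-waits whenever all slots are taken; the commented-out time.sleep(0.01) in the original would ease that spin if re-enabled.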
setup.py

@@ -86,7 +86,7 @@ DO_LINK = 0
global version
global aumento_versione
-aumento_versione = "b"
+aumento_versione = "c"
global version