文章目录
多进程 Multiprocessing · 添加进程 Process · 存储进程输出 Queue · 效率对比 threading & multiprocessing · 进程池 Pool · 共享内存 shared memory · 进程锁 Lock
多进程 Multiprocessing
添加进程 Process
import multiprocessing as mp


def job(a, d):
    """Worker run in the child process.

    The parameters only demonstrate passing arguments through
    ``args=``; they are not used by the body.
    """
    print('aaaaa')


# Fix: guard process creation with __main__. On platforms that use the
# 'spawn' start method (Windows; macOS since Python 3.8) the child
# process re-imports this module, and an unguarded Process(...).start()
# at module level raises a RuntimeError / recurses endlessly.
if __name__ == '__main__':
    p1 = mp.Process(target=job, args=(1, 2))
    p1.start()
    p1.join()  # block until the child finishes
存储进程输出 Queue
import multiprocessing as mp


def job(q):
    """Compute sum(i + i**2 + i**3 for i in range(1000)) and put it on q.

    A Queue is used because a child process cannot hand a return value
    back to the parent directly.
    """
    total = sum(i + i ** 2 + i ** 3 for i in range(1000))
    q.put(total)


if __name__ == '__main__':
    q = mp.Queue()
    workers = [mp.Process(target=job, args=(q,)) for _ in range(2)]
    for w in workers:
        w.start()
    for w in workers:
        w.join()
    res1 = q.get()
    res2 = q.get()
    print(res1 + res2)
效率对比 threading & multiprocessing
import multiprocessing as mp
import threading as td
import time


def job(q):
    """CPU-heavy work; the total is delivered through queue q."""
    total = 0
    for i in range(1000000):
        total += i + i ** 2 + i ** 3
    q.put(total)


def multicore():
    """Run job in two separate processes (true parallelism on two cores)."""
    q = mp.Queue()
    procs = [mp.Process(target=job, args=(q,)) for _ in range(2)]
    for p in procs:
        p.start()
    for p in procs:
        p.join()
    print('multicore:', q.get() + q.get())


def multithread():
    """Run job in two threads; the GIL prevents CPU parallelism."""
    q = mp.Queue()
    threads = [td.Thread(target=job, args=(q,)) for _ in range(2)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    print('multithread:', q.get() + q.get())


def normal():
    """Baseline: the same total computed twice, sequentially."""
    total = 0
    for _ in range(2):
        for i in range(1000000):
            total += i + i ** 2 + i ** 3
    print('normal:', total)


if __name__ == '__main__':
    st = time.time()
    normal()
    st1 = time.time()
    print('normal time:', st1 - st)
    multithread()
    st2 = time.time()
    print('multithread time:', st2 - st1)
    multicore()
    print('multicore time:', time.time() - st2)

# Sample output for range(1000000):
#   normal:      499999666667166666000000   normal time:      ~1.13 s
#   multithread: 499999666667166666000000   multithread time: ~1.31 s (slower!)
#   multicore:   499999666667166666000000   multicore time:   ~0.65 s
进程池 Pool
import multiprocessing as mp


def job(x):
    """Return the square of x."""
    return x * x


def multicore():
    """Demonstrate the three ways of submitting work to a Pool."""
    pool = mp.Pool(processes=2)
    # map() spreads the iterable over the workers and gathers every result.
    print(pool.map(job, range(10)))
    # apply_async() takes exactly one argument tuple and yields one result.
    async_result = pool.apply_async(job, (2,))
    print(async_result.get())
    # A list of apply_async() calls emulates map().
    pending = [pool.apply_async(job, (i,)) for i in range(10)]
    print([p.get() for p in pending])


if __name__ == '__main__':
    multicore()

'''
[0, 1, 4, 9, 16, 25, 36, 49, 64, 81] # map()
4
[0, 1, 4, 9, 16, 25, 36, 49, 64, 81] # multi_res
'''
总结
Pool 默认使用 CPU 的核数,传入 processes 参数可自定义进程数;map() 放入可迭代参数,返回多个结果;apply_async() 只能放入一组参数并返回一个结果,如果想得到 map() 的效果需要通过迭代。
共享内存 shared memory
import multiprocessing as mp

# Shared-memory wrappers: plain Python objects are not shared between
# processes, so multiprocessing provides C-typed shared values.
value1 = mp.Value('i', 0)            # 'i' = signed int, initial value 0
value2 = mp.Value('d', 3.14)         # 'd' = double-precision float
array = mp.Array('i', [1, 2, 3, 4])  # one-dimensional typed array only
参考数据形式(类型码): https://docs.python.org/3.5/library/array.html
进程锁 Lock
import multiprocessing as mp
import time


def job(v, num, l):
    """Add num to shared value v five times while holding lock l.

    Holding the lock for the whole loop makes one process finish all of
    its increments before the other one starts, so the printed sequence
    does not interleave.
    """
    l.acquire()
    for _step in range(5):
        time.sleep(0.1)
        v.value = v.value + num
        print(v.value)
    l.release()


def multicore():
    """Start two workers that share one Value, serialized by one Lock."""
    lock = mp.Lock()
    shared = mp.Value('i', 0)
    p1 = mp.Process(target=job, args=(shared, 1, lock))
    p2 = mp.Process(target=job, args=(shared, 3, lock))
    p1.start()
    p2.start()
    p1.join()
    p2.join()


if __name__ == '__main__':
    multicore()