    foo = Foo()
    threads = [gevent.spawn(foo.inc) for i in range(2000)]
    gevent.joinall(threads)
    print("test_gevent:", foo.i)


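# A plain `self.i += 1` never yields to the gevent hub, so the greenlets above run
# their increments back to back and the race is hard to observe. The sketch below is
# an assumption about Foo's shape (RacyFoo is not the original class): it inserts an
# explicit yield point between the read and the write, which makes the lost updates
# visible -- with 2000 greenlets the final count comes out far below 2000.
class RacyFoo(object):
    """Hypothetical variant of Foo whose increment yields mid-update."""

    def __init__(self):
        self.i = 0

    def inc(self):
        current = self.i        # read
        gevent.sleep(0)         # cooperative yield: other greenlets run here
        self.i = current + 1    # write back a possibly stale value


def test_gevent_racy():
    foo = RacyFoo()
    threads = [gevent.spawn(foo.inc) for i in range(2000)]
    gevent.joinall(threads)
    print("test_gevent_racy:", foo.i)  # far less than 2000

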
def test_multiprocess():
    """
    Each process gets its own copy of foo; nothing is shared, so this prints 0:
    the increments never reach foo.i in the main process. To actually observe that
    the increment is process-unsafe, foo has to be replaced with a process-shared
    value; see test_multiprocess_s below.
    :return:
    """
    foo = Foo()
    process_list = []
    for i in range(200):
        process = Process(target=foo.inc)
        process_list.append(process)
    for process in process_list:
        process.start()
    for process in process_list:
        process.join()
    print("test_multiprocess:", foo.i)


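# To see that each child really works on a private copy, the child can print its own
# foo.i after incrementing: the child sees 1 while the parent still sees 0. A minimal
# sketch, assuming Foo.inc simply does `self.i += 1`; `inc_and_report` is a
# hypothetical helper, not part of the original code.
def inc_and_report(foo):
    foo.inc()
    print("child sees foo.i =", foo.i)       # 1 inside the child process


def test_multiprocess_copies():
    foo = Foo()
    process = Process(target=inc_and_report, args=(foo,))
    process.start()
    process.join()
    print("parent sees foo.i =", foo.i)       # still 0 in the parent process

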
def test_multiprocess_s():
    """
    Without a lock around the += operation, the value printed here will be less
    than the number of processes (200). This is a process-unsafe example.

    The official multiprocessing documentation says of multiprocessing.Value:
    > These shared objects will be process and thread-safe.
    That sentence means that a single read or write of the value is
    process/thread-safe at the Python bytecode level; the increment as a whole
    (read-modify-write) is not.

    Below is the bytecode of the increment, annotated with where the Value lock
    is held, following
    https://eli.thegreenplace.net/2012/01/04/shared-counter-with-pythons-multiprocessing

         0 LOAD_FAST      0 (val)
         3 DUP_TOP                      # <--- Value lock acquired
         4 LOAD_ATTR      0 (value)     # <--- Value lock released
         7 LOAD_CONST     1 (1)
        10 INPLACE_ADD
        11 ROT_TWO                      # <--- Value lock acquired
        12 STORE_ATTR     0 (value)     # <--- Value lock released
    :return:
    """
    shared_value = Value("i", 0)
    process_list = []
    for i in range(200):
        process = Process(target=inc_s, args=(shared_value,))
        process_list.append(process)
    for process in process_list:
        process.start()
    for process in process_list:
        process.join()
    print("test_multiprocess_s: ", shared_value.value)


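# The race above disappears if the whole read-modify-write is guarded by the lock
# that multiprocessing.Value already carries (get_lock()); the counter then reliably
# reaches 200. A minimal sketch, assuming inc_s does a bare `shared_value.value += 1`;
# `inc_s_locked` and `test_multiprocess_s_locked` are illustrative names, not part of
# the original code.
def inc_s_locked(shared_value):
    with shared_value.get_lock():     # hold the Value's lock across the whole +=
        shared_value.value += 1


def test_multiprocess_s_locked():
    shared_value = Value("i", 0)
    process_list = [Process(target=inc_s_locked, args=(shared_value,)) for i in range(200)]
    for process in process_list:
        process.start()
    for process in process_list:
        process.join()
    print("test_multiprocess_s_locked:", shared_value.value)  # always 200

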
if __name__ == '__main__':
    test_multithread()
    test_multiprocess()
    test_multiprocess_s()
    test_gevent()