What is a process?
A process is a running program, that is, a program that is in the middle of being executed.
A process is therefore an abstract concept, and it comes from the operating system.
A single CPU core can only execute one thing at a time; the operating system switches between processes so that many programs appear to run at once.
Start a process
from multiprocessing import Process
import time

def task(name):
    print('%s is running' % name)
    time.sleep(3)
    print('%s is done' % name)

# The code that starts a child process must be placed under
# if __name__ == '__main__': so the child does not re-import and re-run it
if __name__ == '__main__':
    p = Process(target=task, args=('Xiao Wang',))
    # p = Process(target=task, kwargs={'name': 'Xiao Wang'})
    # print(p)
    p.start()  # The main process only sends a signal to the operating system to start the child process
    # The operating system then:
    # 1. Requests memory space
    # 2. Copies the data of the main process into the child process
    # 3. Schedules the CPU to run the code inside it
    # So creating a process has a high overhead
    print('main')
The join() method
When the current process calls join() on another process, it waits until that other process has finished executing before moving on.
from multiprocessing import Process
import time

def task(name, n):
    print('%s is running' % name)
    time.sleep(n)
    print('%s is done' % name)

if __name__ == '__main__':
    start = time.time()
    p_l = []
    for i in range(1, 4):
        p = Process(target=task, args=('Xiao Wang%s' % i, i))
        p_l.append(p)
        p.start()
    # The main process waits for every child process to finish
    for p in p_l:
        p.join()
    print('main', time.time() - start)
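For contrast, if join() is called right after each start(), the children run one after another instead of concurrently. A minimal sketch reusing the same task (the timings in the comment are approximate):

from multiprocessing import Process
import time

def task(name, n):
    print('%s is running' % name)
    time.sleep(n)
    print('%s is done' % name)

if __name__ == '__main__':
    start = time.time()
    for i in range(1, 4):
        p = Process(target=task, args=('Xiao Wang%s' % i, i))
        p.start()
        p.join()  # waiting here before starting the next child makes the whole run serial
    # roughly 1 + 2 + 3 = 6 seconds, versus about 3 seconds for the version above
    print('main', time.time() - start)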
Space isolation between processes
from multiprocessing import Process

# This n is the value inside the main process
n = 100

def task():
    global n
    # The child process changes the global variable inside its own memory;
    # the value in the main process does not change
    n = 0

if __name__ == '__main__':
    p = Process(target=task)
    p.start()
    p.join()
    print(n)  # still 100 in the main process
Common process methods
current_process: view the pid (process id)
# 1. Process pid: every process in the operating system has a unique id number, called the pid
from multiprocessing import Process, current_process
import time

def task():
    print('%s is running' % current_process().pid)
    time.sleep(3)
    print('%s is done' % current_process().pid)

# The code that starts a child process must be placed under
# if __name__ == '__main__': so the child does not re-import and re-run it
if __name__ == '__main__':
    p = Process(target=task)
    p.start()
    print('main', current_process().pid)
os.getpid(): view the process id
# The os module can also do it
from multiprocessing import Process, current_process
import time, os

def task():
    print('%s is running, its parent is %s' % (os.getpid(), os.getppid()))
    time.sleep(3)
    print('%s is done, its parent is %s' % (os.getpid(), os.getppid()))

if __name__ == '__main__':
    p = Process(target=task)
    p.start()
    # Who created the main process?
    # When run from pycharm, the parent is the pycharm process
    print('main %s, its parent is %s' % (os.getpid(), os.getppid()))
Other methods and properties of the process
from multiprocessing import Process, current_process
import time, os

def task():
    print('%s is running, its parent is %s' % (os.getpid(), os.getppid()))
    time.sleep(30)
    print('%s is done, its parent is %s' % (os.getpid(), os.getppid()))

if __name__ == '__main__':
    p = Process(target=task)
    p.start()
    # Name of the process
    print(p.name)
    # Kill the child process
    p.terminate()
    # Termination takes a moment
    time.sleep(0.1)
    # Check whether the child process is still alive
    print(p.is_alive())
    print('main %s, its parent is %s' % (os.getpid(), os.getppid()))
daemon
A daemon is essentially a "child process" whose life cycle is no longer than the life cycle of the process it guards.
When the guarded (main) process finishes executing, the daemon child process is killed along with it.
# Once the main process is done, the child process has no reason to keep existing
# The emperor and the eunuch don't live together, but they die together
from multiprocessing import Process
import time

def task(name):
    print('%s alive' % name)
    time.sleep(3)
    print('%s died normally' % name)

if __name__ == '__main__':
    p1 = Process(target=task, args=('Old Eunuch',))
    # Declare the child process as a daemon; this must be set before start()
    p1.daemon = True
    p1.start()
    time.sleep(1)
    print('The Emperor is dying')
mutually exclusive lock
How can data be shared when every process has its own isolated memory space?
Memory is not shared between processes, but they do share the same file system, so there is no problem with them accessing the same file.
Sharing brings competition, though, and competition corrupts the data; the way to control that is to add locks.
'''
Buying a ticket involves two steps:
    search - check the remaining tickets
    get    - buy a ticket
Mutex lock: once acquired in the program, a lock must be released before it
    can be acquired again, so the program has to release it at the right time.
Using a file to hold the shared data:
    1. It is slow
    2. A mutex lock is still required
'''
import json
import time, random
from multiprocessing import Process, Lock

# Check the tickets
def search(name):
    # 'db.json' is an assumed filename; the original is not given
    with open('db.json', 'rt', encoding='utf-8') as f:
        dic = json.load(f)
    # Simulate the time spent checking
    time.sleep(1)
    print('%s viewed the remaining tickets: %s' % (name, dic['count']))

# Buy a ticket
# Without a lock, the second child process to call get() does not see the
# count written by the first one, so tickets get oversold.
# A mutex lock turns this part from concurrent into serial:
# efficiency is sacrificed, but the data stays safe.
def get(name):
    with open('db.json', 'rt', encoding='utf-8') as f:
        dic = json.load(f)
    if dic['count'] > 0:
        dic['count'] -= 1
        # Simulate the time spent writing
        time.sleep(random.randint(1, 3))
        with open('db.json', 'wt', encoding='utf-8') as f:
            json.dump(dic, f)
        print('%s purchased a ticket successfully' % name)
    else:
        print('%s found no tickets left' % name)

def task(name, mutex):
    # Concurrent
    search(name)
    # Serial: acquire the mutex lock
    mutex.acquire()
    get(name)
    # Release the mutex lock
    mutex.release()

# Unlocked version, kept for comparison (here task would take only the name):
# if __name__ == '__main__':
#     for i in range(10):
#         p = Process(target=task, args=('Passerby%s' % i,))
#         p.start()

# Locked version
if __name__ == '__main__':
    # The main process creates the lock
    mutex = Lock()
    for i in range(10):
        # The lock is passed into each child process
        p = Process(target=task, args=('Passerby%s' % i, mutex))
        p.start()
    # join can only serialize a process's tasks as a whole;
    # a mutex lock can serialize just the part that needs it.
    # Data safety: reads can stay concurrent, writes (changes) must be serial.
The file holds only 10 tickets; without the lock, the tickets may be oversold.
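For the example above to actually run, the shared ticket file has to exist first. A minimal setup sketch, assuming the file is named db.json (the original filename is not given) and starts with 10 tickets:

import json

# Create the shared ticket file before running the ticket-buying script.
# The name 'db.json' and the starting count are assumptions.
with open('db.json', 'wt', encoding='utf-8') as f:
    json.dump({'count': 10}, f)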
Inter-process communication (IPC mechanism)
'''
The file-plus-lock approach is slow; the IPC mechanism solves both the speed
and the locking problem.
Processes are isolated from each other. To implement inter-process
communication (IPC), the multiprocessing module supports two forms, queues
and pipes, both of which use message passing over a shared memory space.
Queue = pipe + locks
'''
from multiprocessing import Queue

# Queues live in memory, so they are best suited to small pieces of data:
# messages, download addresses, and so on.
# Queue(3) limits the number of items in the queue
# FIFO: first in, first out
q = Queue(3)

# Put items in
q.put('a')
q.put('b')
q.put({'x': 2})
print('The basket is full')
# The queue is full, so another put() would block - the equivalent of a lock
# q.put({'x': 2})

# Take items out
print(q.get())
print(q.get())
print(q.get())
# The queue is now empty; another get() blocks, waiting for data to be put in -
# also the equivalent of a lock
print('The queue is empty')
print(q.get())
Once the queue has been emptied, the final q.get() blocks until a new element is put in, so the program above never ends.
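If you do not want get() to block forever, multiprocessing.Queue also accepts a timeout and has a non-blocking variant, both of which raise queue.Empty. A small sketch:

import queue
from multiprocessing import Queue

q = Queue(3)

try:
    # Wait at most 1 second; raises queue.Empty if nothing arrives in time
    print(q.get(timeout=1))
except queue.Empty:
    print('Gave up waiting, the queue is empty')

try:
    # Return immediately; raises queue.Empty if the queue has no items
    print(q.get_nowait())
except queue.Empty:
    print('Still nothing in the queue')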
Using JoinableQueue to implement producer-consumer
A consumer calls JoinableQueue's task_done() after it finishes handling each item it took from the queue; q.join() then returns once every item has been acknowledged, so the program knows when the work is done.
'''
Xiao Wang and Xiao Zhou each produce 10 buns / portions of shredded potato.
Xiao Dai and Xiao Yang keep eating; once there is nothing left in the queue,
the processes end.
'''
import time, random
from multiprocessing import Process, JoinableQueue

def producer(name, food, q):
    for i in range(10):
        res = '%s%s' % (food, i)
        # Simulate the time spent producing the data
        time.sleep(random.randint(1, 3))
        q.put(res)
        print('Chef %s produced %s' % (name, res))

def consumer(name, q):
    while True:
        # When the orders are gone the consumer just waits; the queue is empty
        res = q.get()
        # Simulate the time spent processing the data
        time.sleep(random.randint(1, 3))
        print('Eater %s ate %s' % (name, res))
        # 1. Acknowledge each item taken from the queue, so that once the
        #    queue has been drained q.join() can return
        q.task_done()

# Multiple producers and consumers
if __name__ == '__main__':
    q = JoinableQueue()
    # Producers
    p1 = Process(target=producer, args=('Xiao Wang', 'Bun', q))
    p3 = Process(target=producer, args=('Xiao Zhou', 'Shredded potato', q))
    # Consumers
    c1 = Process(target=consumer, args=('Xiao Dai', q))
    c2 = Process(target=consumer, args=('Xiao Yang', q))

    # 3. What the daemon flag does: when the main process dies,
    #    the consumer child processes die with it
    c1.daemon = True
    c2.daemon = True

    p1.start()
    p3.start()
    c1.start()
    c2.start()

    # Wait until both producers have finished producing
    p1.join()
    p3.join()

    # 2. q.join() returns once the consumers' task_done() calls have
    #    acknowledged every item that was put into the queue
    q.join()
    print('main')
    # Have the producers finished? Yes - see p1.join() / p3.join()
    # Have the consumers finished? Yes - see 1 and 2: the queue has been drained
When the queue is empty the consumers no longer need to keep waiting; because they are daemons they die with the main process, and the whole program ends.
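The task_done()/join() bookkeeping can also be seen on its own: q.join() only returns after task_done() has been called once for every item that was put in. A minimal single-process sketch:

from multiprocessing import JoinableQueue

q = JoinableQueue()
for i in range(3):
    q.put(i)

# Handle the items in the same process, just to show the accounting
for _ in range(3):
    item = q.get()
    print('handled', item)
    q.task_done()  # acknowledge one item

q.join()  # returns immediately, because every put() has been acknowledged
print('all items accounted for')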
Summary
That's all for this post. I hope it helps you, and I hope you'll check back for more from me!