Diffstat (limited to 'engine/SCons/Job.py')
-rw-r--r--   engine/SCons/Job.py   34
1 file changed, 17 insertions, 17 deletions
diff --git a/engine/SCons/Job.py b/engine/SCons/Job.py
index e3d7f0f..bbf1def 100644
--- a/engine/SCons/Job.py
+++ b/engine/SCons/Job.py
@@ -29,7 +29,9 @@ stop, and wait on jobs.
 # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 #
 
-__revision__ = "src/engine/SCons/Job.py 4720 2010/03/24 03:14:11 jars"
+__revision__ = "src/engine/SCons/Job.py 5023 2010/06/14 22:05:46 scons"
+
+import SCons.compat
 
 import os
 import signal
@@ -50,7 +52,7 @@ default_stack_size = 256
 interrupt_msg = 'Build interrupted.'
 
 
-class InterruptState:
+class InterruptState(object):
     def __init__(self):
         self.interrupted = False
 
@@ -61,7 +63,7 @@ class InterruptState:
         return self.interrupted
 
 
-class Jobs:
+class Jobs(object):
     """An instance of this class initializes N jobs, and provides
     methods for starting, stopping, and waiting on all N jobs.
     """
@@ -127,7 +129,7 @@ class Jobs:
         turns out that it very difficult to stop the build process
         by throwing asynchronously an exception such as
         KeyboardInterrupt. For example, the python Condition
-        variables (threading.Condition) and Queue's do not seem to
+        variables (threading.Condition) and queue's do not seem to
         asynchronous-exception-safe. It would require adding a whole
         bunch of try/finally block and except KeyboardInterrupt all
         over the place.
@@ -161,7 +163,7 @@ class Jobs:
         except AttributeError:
             pass
 
-class Serial:
+class Serial(object):
     """This class is used to execute tasks in series, and is more efficient
     than Parallel, but is only appropriate for non-parallel builds. Only
     one instance of this class should be in existence at a time.
@@ -187,7 +189,7 @@ class Serial:
         fails to execute (i.e. execute() raises an exception), then the job will
         stop."""
 
-        while 1:
+        while True:
             task = self.taskmaster.next_task()
 
             if task is None:
@@ -221,7 +223,7 @@ class Serial:
 # Parallel class (and its dependent classes) will work if the interpreter
 # doesn't support threads.
 try:
-    import Queue
+    import queue
     import threading
 except ImportError:
     pass
@@ -240,7 +242,7 @@ else:
             self.start()
 
         def run(self):
-            while 1:
+            while True:
                 task = self.requestQueue.get()
 
                 if task is None:
@@ -262,7 +264,7 @@ else:
 
                 self.resultsQueue.put((task, ok))
 
-    class ThreadPool:
+    class ThreadPool(object):
         """This class is responsible for spawning and managing worker threads."""
 
         def __init__(self, num, stack_size, interrupted):
@@ -271,8 +273,8 @@ else:
             One must specify the stack size of the worker threads. The
             stack size is specified in kilobytes.
             """
-            self.requestQueue = Queue.Queue(0)
-            self.resultsQueue = Queue.Queue(0)
+            self.requestQueue = queue.Queue(0)
+            self.resultsQueue = queue.Queue(0)
 
             try:
                 prev_size = threading.stack_size(stack_size*1024)
@@ -293,9 +295,7 @@ else:
                 worker = Worker(self.requestQueue, self.resultsQueue, interrupted)
                 self.workers.append(worker)
 
-            # Once we drop Python 1.5 we can change the following to:
-            #if 'prev_size' in locals():
-            if 'prev_size' in locals().keys():
+            if 'prev_size' in locals():
                 threading.stack_size(prev_size)
 
         def put(self, task):
@@ -338,7 +338,7 @@ else:
                 worker.join(1.0)
             self.workers = []
 
-    class Parallel:
+    class Parallel(object):
         """This class is used to execute tasks in parallel, and is somewhat
         less efficient than Serial, but is appropriate for parallel builds.
 
@@ -374,7 +374,7 @@ else:
 
             jobs = 0
 
-            while 1:
+            while True:
                 # Start up as many available tasks as we're
                 # allowed to.
                 while jobs < self.maxjobs:
@@ -402,7 +402,7 @@ else:
 
                 # Let any/all completed tasks finish up before we go
                 # back and put the next batch of tasks on the queue.
-                while 1:
+                while True:
                     task, ok = self.tp.get()
                     jobs = jobs - 1
 
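
The new `import SCons.compat` at the top of the module is what lets the rest of the file use the Python 3 spellings (`import queue`, `queue.Queue`) even on older interpreters. The contents of SCons.compat are not part of this diff; the following is only a hypothetical sketch of the module-aliasing technique such a compatibility layer typically uses, not the actual SCons.compat source:

    import sys

    try:
        import queue                   # Python 3, or the alias already exists
    except ImportError:
        import Queue as queue          # Python 2 spelling of the same module
        sys.modules['queue'] = queue   # make later "import queue" succeed

Once a shim like this has run, every subsequent `import queue` in the process resolves through sys.modules to the registered module, which is why Job.py only needs the single `import SCons.compat` ahead of its other imports.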
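
For context on the `Worker` and `ThreadPool` hunks above: they follow a classic two-queue pattern in which the pool pushes tasks onto a request queue, each worker thread pulls from it, and results come back on a results queue, with `None` used as a shutdown sentinel. The standalone sketch below illustrates only that pattern; the callable "tasks" and the pool size are invented for the example and are not SCons's Task objects:

    import queue
    import threading

    def worker(request_q, results_q):
        # Pull work until the None sentinel arrives, mirroring Worker.run().
        while True:
            task = request_q.get()
            if task is None:
                break
            try:
                result = task()            # here a "task" is just a callable
                ok = True
            except Exception as exc:
                result, ok = exc, False
            results_q.put((task, result, ok))

    request_q = queue.Queue(0)             # 0 means an unbounded queue
    results_q = queue.Queue(0)
    workers = [threading.Thread(target=worker, args=(request_q, results_q))
               for _ in range(4)]
    for t in workers:
        t.start()

    for n in range(8):
        request_q.put(lambda n=n: n * n)   # enqueue a few toy tasks
    for _ in range(8):
        print(results_q.get())             # drain results as workers finish
    for t in workers:
        request_q.put(None)                # one sentinel per worker thread
    for t in workers:
        t.join()

The `Queue.Queue(0)` to `queue.Queue(0)` rename in the diff changes nothing about this behavior; both names refer to the same thread-safe queue implementation.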