extras-buildsys/builder builder.py,1.34,1.35

Daniel Williams (dcbw) fedora-extras-commits at redhat.com
Tue Aug 30 21:47:22 UTC 2005


Author: dcbw

Update of /cvs/fedora/extras-buildsys/builder
In directory cvs-int.fedora.redhat.com:/tmp/cvs-serv16000/builder

Modified Files:
	builder.py 
Log Message:
2005-08-30  Dan Williams <dcbw at redhat.com>

    * builder/builder.py
        - all self.log() -> self._log()
        - Record the job end time when the job loop exits and expose it
            via a new endtime() method

    * server/Builder.py
        - Fix condition where finished jobs wouldn't get noticed
            by the server

    * server/PackageJob.py
        - Fix CVS commands
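
For reference, the builder.py rename only marks the job logger as internal;
the method body is unchanged in the hunks below. A minimal standalone sketch
(the class name here is hypothetical, the real class lives in builder.py):

    class BuilderJob:
        # Hypothetical stand-in for the job class in builder.py
        def __init__(self, log_fd):
            self._log_fd = log_fd

        def _log(self, string):
            # Leading underscore marks the logger as internal to the class;
            # it appends to the job's log file and flushes immediately.
            if string and self._log_fd:
                self._log_fd.write(string)
                self._log_fd.flush()

Callers inside the class simply switch from self.log(...) to self._log(...);
code outside the class should treat the logger as private.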




Index: builder.py
===================================================================
RCS file: /cvs/fedora/extras-buildsys/builder/builder.py,v
retrieving revision 1.34
retrieving revision 1.35
diff -u -r1.34 -r1.35
--- builder.py	29 Aug 2005 19:28:40 -0000	1.34
+++ builder.py	30 Aug 2005 21:47:20 -0000	1.35
@@ -81,6 +81,7 @@
         self._controller = controller
         self.buildarch = buildarch
         self._starttime = time.time()
+        self._endtime = 0
         self._uniqid = uniqid
         self._status = 'init'
         self._die = False
@@ -110,7 +111,7 @@
 
         target_dict = self._target_cfg.target_dict()
         target_str = "%s-%s-%s-%s" % (target_dict['distro'], target_dict['target'], target_dict['arch'], target_dict['repo'])
-        self.log("""Starting job:
+        self._log("""Starting job:
    Time: %s
    Target: %s
    UID: %s
@@ -122,7 +123,7 @@
         except FileDownloader.FileNameError, e:
             self._status = 'failed'
             self._srpm_path = None
-            self.log("Failed to extract SRPM filename.  Error: '%s'   URL: %s\n" % (e, srpm_url))
+            self._log("Failed to extract SRPM filename.  Error: '%s'   URL: %s\n" % (e, srpm_url))
             return
 
         self._srpm_path = os.path.join(work_dir, self._uniqid, "source", srpm_filename)
@@ -131,28 +132,31 @@
     def starttime(self):
         return self._starttime
 
+    def endtime(self):
+        return self._endtime
+
     def die(self, sig=15):
         if self.is_done_status() or self._done_status == 'killed':
             return
         self._die = True
 
     def _handle_death(self):
-        self.log("Killing build process...\n")
+        self._log("Killing build process...\n")
         # Don't try to kill a running cleanup process
         if self._status != 'cleanup' and self._pobj and self._pobj.pid:
             try:
                 os.kill(self._pobj.pid, 15)
             except OSError, e:
-                self.log("Couldn't kill process %d: %s\n" % (self._pobj.pid, e))
+                self._log("Couldn't kill process %d: %s\n" % (self._pobj.pid, e))
 
-        self.log("Killed.\n");
+        self._log("Killed.\n");
         self._done_status = 'killed'
 
         # Don't start cleanup over top of an existing cleanup process
         if self._status != 'cleanup':
             self._start_cleanup()
 
-    def log(self, string):
+    def _log(self, string):
         if string and self._log_fd:
             self._log_fd.write(string)
             self._log_fd.flush()
@@ -164,7 +168,7 @@
 
     def _start_srpm_download(self):
         self._status = 'downloading'
-        self.log("Starting download of %s.\n" % self._srpm_url)
+        self._log("Starting download of %s.\n" % self._srpm_url)
         target_dir = os.path.dirname(self._srpm_path)
         dl_thread = FileDownloader.FileDownloader(self.dl_callback, self._srpm_url, self._srpm_url,
                         target_dir, ['.src.rpm'], certs)
@@ -174,16 +178,16 @@
         url = cb_data
         if dl_status == 'done':
             self._status = 'downloaded'
-            self.log("Retrieved %s.\n" % url)
+            self._log("Retrieved %s.\n" % url)
         elif dl_status == 'failed':
             # Don't overwrite our status with 'failed' if we were cancelled
             # and a download error ocurred
             if not self.is_done_status():
                 self._status = 'failed'
-                self.log("Failed to retrieve %s.\n" % url)
+                self._log("Failed to retrieve %s.\n" % url)
 
     def _start_build(self):
-        self.log("Starting step 'building' with command:\n")
+        self._log("Starting step 'building' with command:\n")
         if not os.path.exists(self._result_dir):
             os.makedirs(self._result_dir)
         if not os.path.exists(self._result_dir):
@@ -192,7 +196,7 @@
                         self.buildarch, self._result_dir, self._state_dir, self._uniqid, self._srpm_path)
         builder_cmd = self._builder_cfg.get_str("General", "builder_cmd")
         cmd = '%s %s %s' % (self.arch_command, builder_cmd, mock_args)
-        self.log("   %s\n" % cmd)
+        self._log("   %s\n" % cmd)
         self._pobj = popen2.Popen4(cmd=cmd, bufsize=1024)
         fcntl.fcntl(self._pobj.fromchild.fileno(), fcntl.F_SETFL, os.O_NONBLOCK)
         self._status = 'prepping'
@@ -215,23 +219,23 @@
 
             # Kill mock after 7s if it didn't dump the status file
             if time.time() - start_time > 7:
-                self.log("Timed out waiting for the mock status file!  %s\n" % mockstatusfile)
+                self._log("Timed out waiting for the mock status file!  %s\n" % mockstatusfile)
                 try:
-                    self.log("Killing mock...\n")
+                    self._log("Killing mock...\n")
                     os.kill(self._pobj.pid, 15)
                 except OSError, e:
-                    self.log("Couldn't kill mock process %d: %s\n" % (self._pobj.pid, e))
+                    self._log("Couldn't kill mock process %d: %s\n" % (self._pobj.pid, e))
                 else:
-                    self.log("Killed.\n")
+                    self._log("Killed.\n")
                 self._status = 'failed'
                 break
 
     def _start_cleanup(self):
-        self.log("Cleaning up the buildroot...\n")
+        self._log("Cleaning up the buildroot...\n")
         builder_cmd = self._builder_cfg.get_str("General", "builder_cmd")
         cmd = '%s %s clean --uniqueext=%s -r %s' % (self.arch_command,
                             builder_cmd, self._uniqid, self.buildroot)
-        self.log("   %s\n" % cmd)
+        self._log("   %s\n" % cmd)
         self._pobj = popen2.Popen4(cmd=cmd)
         self._status = 'cleanup'
 
@@ -316,7 +320,7 @@
                     if e.errno == errno.EAGAIN:     # Resource temporarily unavailable
                         break
                     else:
-                        self.log("Error reading mock output: %s\n" % e)
+                        self._log("Error reading mock output: %s\n" % e)
                 else:
                     # We don't care about output from the 'cleanup' stage
                     if self._status != 'cleanup':
@@ -330,15 +334,15 @@
             self._pobj.poll()
 
         self._files = self._find_files()
-        self.log("\n\n-----------------------\n\n")
+        self._log("\n\n-----------------------\n\n")
         if self._status == 'done':
-            self.log("Job completed successfully.\n")
+            self._log("Job completed successfully.\n")
         elif self._status == 'failed':
             if self._pobj:
                 exit_status = self._pobj.poll()
-                self.log("Job failed due to mock errors!  Please see output in root.log and build.log\n")
+                self._log("Job failed due to mock errors!  Please see output in root.log and build.log\n")
         elif self._status == 'killed':
-            self.log("Job failed because it was killed.\n")
+            self._log("Job failed because it was killed.\n")
 
         if self._log_fd:
             self._log_fd.close()
@@ -358,7 +362,7 @@
         else:
             # Only show this message once
             if not self._repo_locked_msg:
-                self.log("Waiting for repository to unlock before starting the build...\n")
+                self._log("Waiting for repository to unlock before starting the build...\n")
                 self._repo_locked_msg = True
 
     def _status_prepping(self):
@@ -372,7 +376,7 @@
         if exit_status == 0:
             # mock completed successfully
             if self._status != 'building':
-                self.log("Bad job end status %s encountered!" % self._status)
+                self._log("Bad job end status %s encountered!" % self._status)
             self._done_status = 'done'
             self._start_cleanup()
         elif exit_status > 0:
@@ -409,7 +413,7 @@
                 func = getattr(self, "_status_%s" % self._status)
                 func()
             except AttributeError:
-                self.log("ERROR: internal builder inconsistency, didn't recognize status '%s'." % self._status)
+                self._log("ERROR: internal builder inconsistency, didn't recognize status '%s'." % self._status)
                 self._status = 'failed'
 
             self._grab_mock_output()
@@ -419,21 +423,21 @@
 
             time.sleep(3)
 
-        print "Done with job loop: %s" % self._status
+        self._endtime = time.time()
         self._controller.notify_job_done(self)
 
     def _find_files(self):
         # Grab the list of files in our job's result dir and URL encode them
         files_in_dir = os.listdir(self._result_dir)
         file_list = []
-        self.log("\n\nOutput File List:\n-----------------\n")
+        self._log("\n\nOutput File List:\n-----------------\n")
         for f in files_in_dir:
             file_url = get_url_for_file(self._builder_cfg, os.path.join(self._result_dir, f))
             if file_url:
                 file_list.append(file_url)
-                self.log("  Output File: %s\n" % urllib.unquote(file_url))
+                self._log("  Output File: %s\n" % urllib.unquote(file_url))
             else:
-                self.log("  Error: Couldn't get file URL for file %s" % f)
+                self._log("  Error: Couldn't get file URL for file %s" % f)
         return file_list
 
     def status(self):
@@ -504,6 +508,7 @@
                 'sparc64':  Sparc64Arch
                }
 
+
 class XMLRPCBuilderServer:
     def __init__(self, cfg, max_jobs):
         self._all_jobs = {} # unique id => awclass instance
@@ -512,7 +517,7 @@
         self._cfg = cfg
         self._max_jobs = max_jobs
 
-    def log(self, string):
+    def _log(self, string):
         if self._cfg.get_bool("General", "debug"):
             print string
 
@@ -562,7 +567,7 @@
         self._building_jobs_lock.acquire()
         num_building = len(self._building_jobs)
         if num_building >= self._max_jobs:
-            self.log("Tried to build '%s' on target %s when already building" \
+            self._log("Tried to build '%s' on target %s when already building" \
                         " %d/%d jobs" % (srpm_url, target_str, num_building, self._max_jobs))
             self._building_jobs_lock.release()
             return 0
@@ -570,7 +575,7 @@
 
         target_cfg = self._get_target_cfg(target_dict)
         if not target_cfg:
-            self.log("Tried to build '%s' on target %s which isn't supported" % (srpm_url, target_str))
+            self._log("Tried to build '%s' on target %s which isn't supported" % (srpm_url, target_str))
             return 0
 
         uniqid = self._generate_uniqid(target_str, srpm_url)
@@ -582,10 +587,10 @@
             self._building_jobs_lock.release()
             job.start()
             filename = os.path.basename(srpm_url)
-            self.log("%s: started %s on %s arch %s at time %d" % (uniqid, filename,
+            self._log("%s: started %s on %s arch %s at time %d" % (uniqid, filename,
                         target_str, target_dict['arch'], job.starttime()))
         else:
-            self.log("%s: Failed request for %s on %s UNSUPPORTED arch %s" %
+            self._log("%s: Failed request for %s on %s UNSUPPORTED arch %s" %
                         (uniqid, srpm_url, target_str, target_dict['arch'], cur_time))
             uniqid = 0
 
@@ -603,6 +608,7 @@
             job.die()
         except KeyError:
             pass
+        return 0
 
     def files(self, uniqid):
         try:
