NetRender
author     Martin Poirier <theeth@yahoo.com>  Sun, 2 May 2010 21:34:08 +0000 (21:34 +0000)
committer  Martin Poirier <theeth@yahoo.com>  Sun, 2 May 2010 21:34:08 +0000 (21:34 +0000)
Repath functionality: edit paths in the blend file on slaves to match the transferred files.
Tested with textures and point caches. Fluid should work. Point caches with the External option might not.

This should fix previous problems when using absolute paths in blend files.
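
Roughly, the slave-side flow with this change is sketched below (names follow the slave.py and repath.py diffs; the surrounding loop, job-type checks and error handling are abbreviated):

    # Each RenderFile now carries both its original path and its local path on the slave.
    for rfile in job.files[1:]:
        testFile(conn, job.id, slave_id, rfile, JOB_PREFIX, main_path)  # sets rfile.filepath

    netrender.repath.update(job)  # rewrite paths in a .remap copy, swap it in, keep a .bak

    # ... render the job ...

    netrender.repath.reset(job)   # restore the original blend file from its .bak copy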

release/scripts/io/netrender/__init__.py
release/scripts/io/netrender/model.py
release/scripts/io/netrender/repath.py [new file with mode: 0755]
release/scripts/io/netrender/slave.py
release/scripts/io/netrender/utils.py

diff --git a/release/scripts/io/netrender/__init__.py b/release/scripts/io/netrender/__init__.py
index e06d061f1738e0ea1b699054ef85a340477e2e49..f5f104d6d9229d2eb6eb6dc7a4231be6d46b2693 100644 (file)
@@ -27,6 +27,7 @@ from netrender import master_html
 from netrender import utils
 from netrender import balancing
 from netrender import ui
+from netrender import repath
 
 jobs = []
 slaves = []
diff --git a/release/scripts/io/netrender/model.py b/release/scripts/io/netrender/model.py
index a2912c78c56a04b79f3209471464f41ff05a3aca..e7656f498b4a450800d5bf35e9cd580b98ab9150 100644 (file)
@@ -105,6 +105,7 @@ JOB_TYPES = {
 class RenderFile:
     def __init__(self, filepath = "", index = 0, start = -1, end = -1, signature=0):
         self.filepath = filepath
+        self.original_path = filepath
         self.signature = signature
         self.index = index
         self.start = start
@@ -113,6 +114,7 @@ class RenderFile:
     def serialize(self):
         return         {
                     "filepath": self.filepath,
+                    "original_path": self.original_path,
                     "index": self.index,
                     "start": self.start,
                     "end": self.end,
@@ -125,6 +127,7 @@ class RenderFile:
             return None
 
         rfile = RenderFile(data["filepath"], data["index"], data["start"], data["end"], data["signature"])
+        rfile.original_path = data["original_path"]
 
         return rfile
 
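When the client first creates a RenderFile, both fields start out equal (original_path = filepath in __init__), so a freshly serialized entry looks roughly like this (hypothetical values):

    # {"filepath": "/home/artist/project/textures/wood.png",
    #  "original_path": "/home/artist/project/textures/wood.png",
    #  "index": 1, "start": -1, "end": -1, "signature": 1234567890}

The two diverge once a receiving machine relocates its copy and updates filepath, while original_path keeps pointing at the path recorded in the blend file.
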
diff --git a/release/scripts/io/netrender/repath.py b/release/scripts/io/netrender/repath.py
new file mode 100755 (executable)
index 0000000..5287fc0
--- /dev/null
@@ -0,0 +1,145 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+#  This program is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU General Public License
+#  as published by the Free Software Foundation; either version 2
+#  of the License, or (at your option) any later version.
+#
+#  This program is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#  GNU General Public License for more details.
+#
+#  You should have received a copy of the GNU General Public License
+#  along with this program; if not, write to the Free Software Foundation,
+#  Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+import sys, os
+import subprocess
+
+import bpy
+
+from netrender.utils import *
+import netrender.model
+
+BLENDER_PATH = sys.argv[0]
+
+def reset(job):
+    main_file = job.files[0]
+    
+    job_full_path = main_file.filepath
+
+    if os.path.exists(job_full_path + ".bak"):
+        os.remove(job_full_path) # repathed file
+        os.renames(job_full_path + ".bak", job_full_path)
+
+def update(job):
+    paths = []
+    
+    main_file = job.files[0]
+    
+    job_full_path = main_file.filepath
+
+        
+    path, ext = os.path.splitext(job_full_path)
+    
+    new_path = path + ".remap" + ext 
+    
+    all = main_file.filepath == main_file.original_path 
+    
+    for rfile in job.files[1:]:
+        if all or rfile.original_path != rfile.filepath:
+            paths.append(rfile.original_path)
+            paths.append(rfile.filepath)
+    
+    # Only update if needed
+    if paths:        
+        process = subprocess.Popen([BLENDER_PATH, "-b", "-noaudio", job_full_path, "-P", __file__, "--", new_path] + paths, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+        process.wait()
+        
+        os.renames(job_full_path, job_full_path + ".bak")
+        os.renames(new_path, job_full_path)
+
+def process(paths):
+    def processPointCache(point_cache):
+        point_cache.external = False
+
+    def processFluid(fluid):
+        new_path = path_map.get(fluid.path, None)
+        if new_path:
+            fluid.path = new_path
+    
+    path_map = {}
+    for i in range(0, len(paths), 2):
+        # special case for point cache
+        if paths[i].endswith(".bphys"):
+            pass # Don't need them in the map, they all use the default external path
+            # NOTE: This is probably not correct all the time, needs to be fixed.
+        # special case for fluids
+        elif paths[i].endswith(".bobj.gz"):
+            path_map[os.path.split(paths[i])[0]] = os.path.split(paths[i+1])[0]
+        else:
+            path_map[paths[i]] = paths[i+1]
+    
+    ###########################
+    # LIBRARIES
+    ###########################
+    for lib in bpy.data.libraries:
+        file_path = bpy.utils.expandpath(lib.filename)
+        new_path = path_map.get(file_path, None)
+        if new_path:
+            lib.filename = new_path
+
+    ###########################
+    # IMAGES
+    ###########################
+    for image in bpy.data.images:
+        if image.source == "FILE" and not image.packed_file:
+            file_path = bpy.utils.expandpath(image.filename)
+            new_path = path_map.get(file_path, None)
+            if new_path:
+                image.filename = new_path
+            
+
+    ###########################
+    # FLUID + POINT CACHE
+    ###########################
+    for object in bpy.data.objects:
+        for modifier in object.modifiers:
+            if modifier.type == 'FLUID_SIMULATION' and modifier.settings.type == "DOMAIN":
+                processFluid(modifier.settings)
+            elif modifier.type == "CLOTH":
+                processPointCache(modifier.point_cache)
+            elif modifier.type == "SOFT_BODY":
+                processPointCache(modifier.point_cache)
+            elif modifier.type == "SMOKE" and modifier.smoke_type == "TYPE_DOMAIN":
+                processPointCache(modifier.domain_settings.point_cache_low)
+                if modifier.domain_settings.highres:
+                    processPointCache(modifier.domain_settings.point_cache_high)
+            elif modifier.type == "MULTIRES" and modifier.external:
+                file_path = bpy.utils.expandpath(modifier.filename)
+                new_path = path_map.get(file_path, None)
+                if new_path:
+                    modifier.filename = new_path
+
+        # particle modifiers are stupid and don't contain data
+        # we have to go through the object property
+        for psys in object.particle_systems:
+            processPointCache(psys.point_cache)
+                
+
+if __name__ == "__main__":
+    try:
+        i = sys.argv.index("--")
+    except ValueError:
+        i = 0
+    
+    if i:
+        new_path = sys.argv[i+1]
+        args = sys.argv[i+2:]
+        
+        process(args)
+        
+        bpy.ops.wm.save_as_mainfile(path=new_path, check_existing=False)
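
To make the argument convention concrete, here is a small sketch (hypothetical paths) of the pairs update() passes after "--" and the path_map that process() builds from them:

    # sys.argv after "--": [new_path, original_1, local_1, original_2, local_2, ...]
    paths = ["/home/artist/project/textures/wood.png",
             "/slave/prefix/job_1/textures/wood.png",
             "/home/artist/project/cache/fluidsurface_final_0001.bobj.gz",
             "/slave/prefix/job_1/cache/fluidsurface_final_0001.bobj.gz"]
    # process(paths) would then build:
    # path_map = {"/home/artist/project/textures/wood.png": "/slave/prefix/job_1/textures/wood.png",
    #             "/home/artist/project/cache": "/slave/prefix/job_1/cache"}  # fluid files map by directory
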
diff --git a/release/scripts/io/netrender/slave.py b/release/scripts/io/netrender/slave.py
index 43420c1b5b686b12956a41a5ef777e94a7b2a88a..9fd00152dc129bb65a7c908b960e8878d64a7d51 100644 (file)
@@ -22,6 +22,7 @@ import subprocess, time
 
 from netrender.utils import *
 import netrender.model
+import netrender.repath
 
 BLENDER_PATH = sys.argv[0]
 
@@ -75,9 +76,10 @@ def testFile(conn, job_id, slave_id, rfile, JOB_PREFIX, main_path = None):
         
         if not found:
             print("Found file %s at %s but signature mismatch!" % (rfile.filepath, job_full_path))
+            job_full_path = prefixPath(JOB_PREFIX, rfile.filepath, main_path, force = True)
 
     if not found:
-        temp_path = JOB_PREFIX + "slave.temp.blend"
+        temp_path = JOB_PREFIX + "slave.temp"
         conn.request("GET", fileURL(job_id, rfile.index), headers={"slave-id":slave_id})
         response = conn.getresponse()
 
@@ -94,6 +96,8 @@ def testFile(conn, job_id, slave_id, rfile, JOB_PREFIX, main_path = None):
         f.close()
 
         os.renames(temp_path, job_full_path)
+        
+    rfile.filepath = job_full_path
 
     return job_full_path
 
@@ -138,11 +142,14 @@ def render_slave(engine, netsettings, threads):
                     job_full_path = testFile(conn, job.id, slave_id, job.files[0], JOB_PREFIX)
                     print("Fullpath", job_full_path)
                     print("File:", main_file, "and %i other files" % (len(job.files) - 1,))
-                    engine.update_stats("", "Render File "+ main_file+ " for job "+ job.id)
 
                     for rfile in job.files[1:]:
-                        print("\t", rfile.filepath)
                         testFile(conn, job.id, slave_id, rfile, JOB_PREFIX, main_path)
+                        print("\t", rfile.filepath)
+                        
+                    netrender.repath.update(job)
+
+                    engine.update_stats("", "Render File "+ main_file+ " for job "+ job.id)
 
                 # announce log to master
                 logfile = netrender.model.LogFile(job.id, slave_id, [frame.number for frame in job.frames])
@@ -198,6 +205,9 @@ def render_slave(engine, netsettings, threads):
                         if testCancel(conn, job.id, first_frame):
                             cancelled = True
 
+                if job.type == netrender.model.JOB_BLENDER:
+                    netrender.repath.reset(job)
+
                 # read leftovers if needed
                 stdout += process.stdout.read()
 
diff --git a/release/scripts/io/netrender/utils.py b/release/scripts/io/netrender/utils.py
index acd45178c2fb19a8db54461217dc5466b3acb291..f194cdc9559589f67935bf3ca8f75a4dd15ae834 100644 (file)
@@ -28,7 +28,7 @@ try:
 except:
   bpy = None
 
-VERSION = bytes("0.8", encoding='utf8')
+VERSION = bytes("0.9", encoding='utf8')
 
 # Jobs status
 JOB_WAITING = 0 # before all data has been entered
@@ -166,18 +166,21 @@ def hashData(data):
     return m.hexdigest()
     
 
-def prefixPath(prefix_directory, file_path, prefix_path):
+def prefixPath(prefix_directory, file_path, prefix_path, force = False):
     if os.path.isabs(file_path):
         # if an absolute path, make sure path exists, if it doesn't, use relative local path
         full_path = file_path
-        if not os.path.exists(full_path):
+        if force or not os.path.exists(full_path):
             p, n = os.path.split(full_path)
 
             if prefix_path and p.startswith(prefix_path):
-                directory = prefix_directory + p[len(prefix_path):]
-                full_path = directory + os.sep + n
-                if not os.path.exists(directory):
-                    os.mkdir(directory)
+                if len(prefix_path) < len(p):
+                    directory = prefix_directory + p[len(prefix_path)+1:] + os.sep # +1 to remove separator
+                    if not os.path.exists(directory):
+                        os.mkdir(directory)
+                else:
+                    directory = prefix_directory
+                full_path = directory + n
             else:
                 full_path = prefix_directory + n
     else:
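
With the new force flag, an absolute path can be remapped under the job prefix even when a file already exists at the original location (used by the slave after a signature mismatch). A hypothetical example, with prefix_directory ending in a separator as JOB_PREFIX does on the slave:

    # prefixPath("/tmp/netrender/job_1/", "/home/artist/project/tex/wood.png",
    #            "/home/artist/project", force = True)
    # creates "/tmp/netrender/job_1/tex/" if needed and returns
    # "/tmp/netrender/job_1/tex/wood.png"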