/*
 * ***** BEGIN GPL LICENSE BLOCK *****
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 *
 * The Original Code is Copyright (C) 2007 Blender Foundation.
 * All rights reserved.
 *
 * The Original Code is: all of this file.
 *
 * ***** END GPL LICENSE BLOCK *****
 */

/** \file blender/nodes/composite/node_composite_tree.c
 *  \ingroup cmpnodes
 */

#include "DNA_anim_types.h"
#include "DNA_scene_types.h"
#include "DNA_node_types.h"

#include "BLI_listbase.h"
#include "BLI_threads.h"

#include "BLF_translation.h"

#include "BKE_animsys.h"
#include "BKE_colortools.h"
#include "BKE_fcurve.h"
#include "BKE_global.h"
#include "BKE_tracking.h"
#include "BKE_utildefines.h"

#include "node_exec.h"
#include "node_util.h"

#include "RNA_access.h"

#include "NOD_composite.h"
#include "node_composite_util.h"
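
/* iterate over all scenes in the main database and call 'func' for each scene's
 * compositing node tree */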
static void foreach_nodetree(Main *main, void *calldata, bNodeTreeCallback func)
{
	for(sce= main->scene.first; sce; sce= sce->id.next) {
		func(calldata, &sce->id, sce->nodetree);

static void foreach_nodeclass(Scene *UNUSED(scene), void *calldata, bNodeClassCallback func)
{
	func(calldata, NODE_CLASS_INPUT, IFACE_("Input"));
	func(calldata, NODE_CLASS_OUTPUT, IFACE_("Output"));
	func(calldata, NODE_CLASS_OP_COLOR, IFACE_("Color"));
	func(calldata, NODE_CLASS_OP_VECTOR, IFACE_("Vector"));
	func(calldata, NODE_CLASS_OP_FILTER, IFACE_("Filter"));
	func(calldata, NODE_CLASS_CONVERTOR, IFACE_("Convertor"));
	func(calldata, NODE_CLASS_MATTE, IFACE_("Matte"));
	func(calldata, NODE_CLASS_DISTORT, IFACE_("Distort"));
	func(calldata, NODE_CLASS_GROUP, IFACE_("Group"));
	func(calldata, NODE_CLASS_LAYOUT, IFACE_("Layout"));
}
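
/* free the cached composite buffers stored on the output sockets of a single node */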
static void free_node_cache(bNodeTree *UNUSED(ntree), bNode *node)
{
	for(sock= node->outputs.first; sock; sock= sock->next) {
		free_compbuf(sock->cache);

static void free_cache(bNodeTree *ntree)
{
	for(node= ntree->nodes.first; node; node= node->next)
		free_node_cache(ntree, node);
}
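
/* per-node update: called when a node's settings change, runs the node type's update callback */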
static void update_node(bNodeTree *ntree, bNode *node)
{
	for(sock= node->outputs.first; sock; sock= sock->next) {
		//free_compbuf(sock->cache);

	/* individual node update call */
	if (node->typeinfo->updatefunc)
		node->typeinfo->updatefunc(ntree, node);

/* local tree then owns all compbufs */
static void localize(bNodeTree *UNUSED(localtree), bNodeTree *ntree)
{
	for(node= ntree->nodes.first; node; node= node->next) {
		/* ensure new user input gets handled ok */

		/* move over the compbufs */
		/* right after ntreeCopyTree() oldsock pointers are valid */

		if(ELEM(node->type, CMP_NODE_VIEWER, CMP_NODE_SPLITVIEWER)) {
			if(node->flag & NODE_DO_OUTPUT)
				node->new_node->id= (ID *)copy_image((Image *)node->id);
			else
				node->new_node->id= NULL;
		}

		for(sock= node->outputs.first; sock; sock= sock->next) {
			sock->new_sock->cache= sock->cache;
			compbuf_set_node(sock->new_sock->cache, node->new_node);

			sock->new_sock->new_sock= sock;
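
/* sync results from the localized working copy back to the original tree while
 * execution is still in progress: previews of nodes that are ready (and not skipped)
 * are handed over to their original counterparts */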
static void local_sync(bNodeTree *localtree, bNodeTree *ntree)
{
	/* move over the compbufs and previews */
	for(lnode= localtree->nodes.first; lnode; lnode= lnode->next) {
		if((lnode->exec & NODE_READY) && !(lnode->exec & NODE_SKIPPED)) {
			if(ntreeNodeExists(ntree, lnode->new_node)) {
				if(lnode->preview && lnode->preview->rect) {
					nodeFreePreview(lnode->new_node);
					lnode->new_node->preview= lnode->preview;
					lnode->preview= NULL;
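
/* merge the localized tree back into the original tree when execution is done:
 * viewer images, movie distortion contexts and cached output buffers are moved
 * back to the corresponding original nodes */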
static void local_merge(bNodeTree *localtree, bNodeTree *ntree)
{
	/* move over the compbufs and previews */
	for(lnode= localtree->nodes.first; lnode; lnode= lnode->next) {
		if(ntreeNodeExists(ntree, lnode->new_node)) {
			if(ELEM(lnode->type, CMP_NODE_VIEWER, CMP_NODE_SPLITVIEWER)) {
				if(lnode->id && (lnode->flag & NODE_DO_OUTPUT)) {
					/* image_merge does sanity check for pointers */
					BKE_image_merge((Image *)lnode->new_node->id, (Image *)lnode->id);
				}
			}
			else if(lnode->type==CMP_NODE_MOVIEDISTORTION) {
				/* special case for the distortion node: the distortion context is allocated
				 * in the exec function, and to achieve much better performance on further
				 * calls this context should be copied back to the original node */
				if(lnode->new_node->storage)
					BKE_tracking_distortion_destroy(lnode->new_node->storage);

				lnode->new_node->storage= BKE_tracking_distortion_copy(lnode->storage);
			}

			for(lsock= lnode->outputs.first; lsock; lsock= lsock->next) {
				if(ntreeOutputExists(lnode->new_node, lsock->new_sock)) {
					lsock->new_sock->cache= lsock->cache;
					compbuf_set_node(lsock->new_sock->cache, lnode->new_node);

					lsock->new_sock= NULL;
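
/* tree-level update: make sure only a single output node is active */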
static void update(bNodeTree *ntree)
{
	ntreeSetOutput(ntree);
}

bNodeTreeType ntreeType_Composite = {
	/* type */              NTREE_COMPOSIT,
	/* idname */            "NTCompositing Nodetree",

	/* node_types */        { NULL, NULL },

	/* free_cache */        free_cache,
	/* free_node_cache */   free_node_cache,
	/* foreach_nodetree */  foreach_nodetree,
	/* foreach_nodeclass */ foreach_nodeclass,
	/* localize */          localize,
	/* local_sync */        local_sync,
	/* local_merge */       local_merge,
	/* update */            update,
	/* update_node */       update_node,
	/* validate_link */     NULL,
	/* mutefunc */          node_compo_pass_on,
	/* mutelinksfunc */     node_mute_get_links,
	/* gpumutefunc */       NULL
};

/* XXX Group nodes must set use_tree_data to false, since their trees can be shared by multiple nodes.
 * If use_tree_data is true, the ntree->execdata pointer is checked to avoid multiple execution of top-level trees.
 */
struct bNodeTreeExec *ntreeCompositBeginExecTree(bNodeTree *ntree, int use_tree_data)
{
	/* XXX hack: prevent exec data from being generated twice.
	 * this should be handled by the renderer!
	 */
	if (use_tree_data && ntree->execdata)
		return ntree->execdata;

	/* ensures only a single output node is enabled */
	ntreeSetOutput(ntree);

	exec = ntree_exec_begin(ntree);

	for(node= exec->nodetree->nodes.first; node; node= node->next) {
		/* initialize needed for groups */

		for(sock= node->outputs.first; sock; sock= sock->next) {
			bNodeStack *ns= node_get_socket_stack(exec->stack, sock);
			if(ns && sock->cache) {
				ns->data= sock->cache;

		/* cannot initialize them while they are in use by threads */
		if(ELEM4(node->type, CMP_NODE_TIME, CMP_NODE_CURVE_VEC, CMP_NODE_CURVE_RGB, CMP_NODE_HUECORRECT)) {
			curvemapping_initialize(node->storage);
			if(node->type==CMP_NODE_CURVE_RGB)
				curvemapping_premultiply(node->storage, 0);

	/* XXX this should not be necessary, but is still used for cmp/sha/tex nodes,
	 * which only store the ntree pointer. Should be fixed at some point!
	 */
	ntree->execdata = exec;

/* XXX Group nodes must set use_tree_data to false, since their trees can be shared by multiple nodes.
 * If use_tree_data is true, the ntree->execdata pointer is checked to avoid multiple execution of top-level trees.
 */
void ntreeCompositEndExecTree(bNodeTreeExec *exec, int use_tree_data)
{
	bNodeTree *ntree= exec->nodetree;

	for(node= exec->nodetree->nodes.first; node; node= node->next) {
		for(sock= node->outputs.first; sock; sock= sock->next) {
			ns = node_get_socket_stack(exec->stack, sock);
			sock->cache= ns->data;

		if(node->type==CMP_NODE_CURVE_RGB)
			curvemapping_premultiply(node->storage, 1);

	ntree_exec_end(exec);

	/* XXX clear nodetree backpointer to exec data, same problem as noted in ntreeBeginExecTree */
	ntree->execdata = NULL;

/* ***************************** threaded version for execute composite nodes ************* */

/* these are nodes without inputs, only giving values,
 * or nodes with only value inputs */
static int node_only_value(bNode *node)
{
	if(ELEM3(node->type, CMP_NODE_TIME, CMP_NODE_VALUE, CMP_NODE_RGB))
		return 1;

	/* doing this for all node types goes wrong: memory free errors */
	if(node->inputs.first && node->type==CMP_NODE_MAP_VALUE) {
		for(sock= node->inputs.first; sock; sock= sock->next) {
			retval &= node_only_value(sock->link->fromnode);

/* not changing info, for thread callback */
typedef struct ThreadData {
	bNodeStack *stack;
	RenderData *rd;
} ThreadData;
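
/* thread callback: executes a single node using the caller-provided stack and render
 * data, then flags the node as ready so the scheduler can pick up its dependants */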
static void *exec_composite_node(void *nodeexec_v)
{
	bNodeStack *nsin[MAX_SOCKET];   /* arbitrary... watch this */
	bNodeStack *nsout[MAX_SOCKET];  /* arbitrary... watch this */
	bNodeExec *nodeexec= nodeexec_v;
	bNode *node= nodeexec->node;
	ThreadData *thd= (ThreadData *)node->threaddata;

	node_get_stack(node, thd->stack, nsin, nsout);

	if((node->flag & NODE_MUTED) && node->typeinfo->mutefunc)
		node->typeinfo->mutefunc(thd->rd, 0, node, nodeexec->data, nsin, nsout);
	else if(node->typeinfo->execfunc)
		node->typeinfo->execfunc(thd->rd, node, nsin, nsout);
	else if (node->typeinfo->newexecfunc)
		node->typeinfo->newexecfunc(thd->rd, 0, node, nodeexec->data, nsin, nsout);

	node->exec |= NODE_READY;

/* return total of executable nodes, for timecursor */
static int setExecutableNodes(bNodeTreeExec *exec, ThreadData *thd)
{
	bNodeTree *ntree = exec->nodetree;
	bNodeStack *nsin[MAX_SOCKET];   /* arbitrary... watch this */
	bNodeStack *nsout[MAX_SOCKET];  /* arbitrary... watch this */
	int n, totnode= 0, group_edit= 0;

	/* if we are in group edit, viewer nodes get skipped when group has viewer */
	for(node= ntree->nodes.first; node; node= node->next)
		if(node->type==NODE_GROUP && (node->flag & NODE_GROUP_EDIT))
			if(ntreeHasType((bNodeTree *)node->id, CMP_NODE_VIEWER))
				group_edit= 1;

	/* NB: using the exec data list here to have valid dependency sort */
	for(n=0, nodeexec=exec->nodeexec; n < exec->totnodes; ++n, ++nodeexec) {
		node = nodeexec->node;

		node_get_stack(node, exec->stack, nsin, nsout);

		/* test the outputs */
		/* skip value-only nodes (should be in type!) */
		if(!node_only_value(node)) {
			for(a=0, sock= node->outputs.first; sock; sock= sock->next, a++) {
				if(nsout[a]->data==NULL && nsout[a]->hasoutput) {

		/* test the inputs */
		for(a=0, sock= node->inputs.first; sock; sock= sock->next, a++) {
			/* skip viewer nodes in bg render or group edit */
			if(ELEM(node->type, CMP_NODE_VIEWER, CMP_NODE_SPLITVIEWER) && (G.background || group_edit))

			/* is sock in use? */
			else if(sock->link) {
				bNodeLink *link= sock->link;

				/* this is the test for a cyclic case */
				if(link->fromnode==NULL || link->tonode==NULL);
				else if(link->fromnode->level >= link->tonode->level && link->tonode->level!=0xFFF) {
					if(link->fromnode->need_exec) {

					printf("Node %s skipped, cyclic dependency\n", node->name);

		if(node->need_exec) {

			/* free output buffers */
			for(a=0, sock= node->outputs.first; sock; sock= sock->next, a++) {
					free_compbuf(nsout[a]->data);
					nsout[a]->data= NULL;

			/* printf("node needs exec %s\n", node->name); */

			/* tag for getExecutableNode() */

			/* tag for getExecutableNode() */
			node->exec= NODE_READY|NODE_FINISHED|NODE_SKIPPED;

	/* last step: set the stack values for only-value nodes */
	/* just does all now, compared to a full buffer exec this is nothing */

	for(n=0, nodeexec=exec->nodeexec; n < exec->totnodes; ++n, ++nodeexec) {
		node = nodeexec->node;
		if(node->need_exec==0 && node_only_value(node)) {
			if(node->typeinfo->execfunc) {
				node_get_stack(node, exec->stack, nsin, nsout);
				node->typeinfo->execfunc(thd->rd, node, nsin, nsout);

/* while executing tree, free buffers from nodes that are not needed anymore */
static void freeExecutableNode(bNodeTreeExec *exec)
{
	/* node outputs can be freed when:
	 * - not a render result or image node
	 * - when node outputs go to nodes all being set NODE_FINISHED
	 */
	bNodeTree *ntree = exec->nodetree;

	/* set exec flag for finished nodes that might need to be freed */
	for(node= ntree->nodes.first; node; node= node->next) {
		if(node->type!=CMP_NODE_R_LAYERS)
			if(node->exec & NODE_FINISHED)
				node->exec |= NODE_FREEBUFS;

	/* clear this flag for input links that are not done yet.
	 * Using the exec data for valid dependency sort.
	 */
	for(n=0, nodeexec=exec->nodeexec; n < exec->totnodes; ++n, ++nodeexec) {
		node = nodeexec->node;
		if((node->exec & NODE_FINISHED)==0) {
			for(sock= node->inputs.first; sock; sock= sock->next)
				sock->link->fromnode->exec &= ~NODE_FREEBUFS;

	/* now we can free buffers */
	for(node= ntree->nodes.first; node; node= node->next) {
		if(node->exec & NODE_FREEBUFS) {
			for(sock= node->outputs.first; sock; sock= sock->next) {
				bNodeStack *ns= node_get_socket_stack(exec->stack, sock);
				free_compbuf(ns->data);
				// printf("freed buf node %s \n", node->name);
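
/* pick the next node that has not been scheduled yet and whose inputs are all ready */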
static bNodeExec *getExecutableNode(bNodeTreeExec *exec)
{
	for(n=0, nodeexec=exec->nodeexec; n < exec->totnodes; ++n, ++nodeexec) {
		if(nodeexec->node->exec==0) {
			/* input sockets should be ready */
			for(sock= nodeexec->node->inputs.first; sock; sock= sock->next) {
				if(sock->link && sock->link->fromnode)
					if((sock->link->fromnode->exec & NODE_READY)==0)

/* check if texture nodes need exec or end */
static void ntree_composite_texnode(bNodeTree *ntree, int init)
{
	for(node= ntree->nodes.first; node; node= node->next) {
		if(node->type==CMP_NODE_TEXTURE && node->id) {
			Tex *tex= (Tex *)node->id;
			if(tex->nodetree && tex->use_nodes) {
				/* has internal flag to detect it only does it once */
				if(init) {
					if (!tex->nodetree->execdata)
						tex->nodetree->execdata = ntreeTexBeginExecTree(tex->nodetree, 1);
				}
				else {
					ntreeTexEndExecTree(tex->nodetree->execdata, 1);
					tex->nodetree->execdata = NULL;
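
/* main compositor entry point: tags the nodes that need execution, then keeps feeding
 * ready nodes to worker threads until everything is finished or the user aborts via
 * the tree's test_break callback.
 * typical usage from the render pipeline (sketch, simplified):
 *   ntreeCompositTagRender(scene);
 *   ntreeCompositExecTree(scene->nodetree, &scene->r, 1);
 */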
/* optimized tree execute test for compositing */
void ntreeCompositExecTree(bNodeTree *ntree, RenderData *rd, int do_preview)
{
	int totnode, curnode, rendering= 1, n;

	if(ntree==NULL) return;

	exec = ntree->execdata;

	if(do_preview)
		ntreeInitPreview(ntree, 0, 0);

	if (!ntree->execdata) {
		/* XXX this is the top-level tree, so we use the ntree->execdata pointer. */
		exec = ntreeCompositBeginExecTree(ntree, 1);
	}
	ntree_composite_texnode(ntree, 1);

	/* prevent unlucky accidents */
	if(G.background)
		rd->scemode &= ~R_COMP_CROP;

	/* setup callerdata for thread callback */
	thdata.rd= rd;
	thdata.stack= exec->stack;

	/* fixed seed, for example noise texture */
	BLI_srandom(rd->cfra);

	/* sets need_exec tags in nodes */
	curnode = totnode= setExecutableNodes(exec, &thdata);

	BLI_init_threads(&threads, exec_composite_node, rd->threads);

	while(rendering) {

		if(BLI_available_threads(&threads)) {
			nodeexec= getExecutableNode(exec);
			node = nodeexec->node;
			if(ntree->progress && totnode)
				ntree->progress(ntree->prh, (1.0f - curnode/(float)totnode));
			if(ntree->stats_draw) {
				BLI_snprintf(str, sizeof(str), "Compositing %d %s", curnode, node->name);
				ntree->stats_draw(ntree->sdh, str);
			}

			node->threaddata = &thdata;
			node->exec= NODE_PROCESSING;
			BLI_insert_thread(&threads, nodeexec);

		if(ntree->test_break && ntree->test_break(ntree->tbh)) {
			for(node= ntree->nodes.first; node; node= node->next)
				node->exec |= NODE_READY;

		/* check for ready ones, and if we need to continue */
		for(n=0, nodeexec=exec->nodeexec; n < exec->totnodes; ++n, ++nodeexec) {
			node = nodeexec->node;
			if(node->exec & NODE_READY) {
				if((node->exec & NODE_FINISHED)==0) {
					BLI_remove_thread(&threads, nodeexec);	/* this waits for running thread to finish btw */
					node->exec |= NODE_FINISHED;

					/* freeing unused buffers */
					if(rd->scemode & R_COMP_FREE)
						freeExecutableNode(exec);

	BLI_end_threads(&threads);

	/* XXX top-level tree uses the ntree->execdata pointer */
	ntreeCompositEndExecTree(exec, 1);

/* *********************************************** */

/* clumsy checking... should do dynamic outputs once */
static void force_hidden_passes(bNode *node, int passflag)
{
	for(sock= node->outputs.first; sock; sock= sock->next)
		sock->flag &= ~SOCK_UNAVAIL;

	if(!(passflag & SCE_PASS_COMBINED)) {
		sock= BLI_findlink(&node->outputs, RRES_OUT_IMAGE);
		sock->flag |= SOCK_UNAVAIL;
		sock= BLI_findlink(&node->outputs, RRES_OUT_ALPHA);
		sock->flag |= SOCK_UNAVAIL;
	}

	sock= BLI_findlink(&node->outputs, RRES_OUT_Z);
	if(!(passflag & SCE_PASS_Z)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_NORMAL);
	if(!(passflag & SCE_PASS_NORMAL)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_VEC);
	if(!(passflag & SCE_PASS_VECTOR)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_UV);
	if(!(passflag & SCE_PASS_UV)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_RGBA);
	if(!(passflag & SCE_PASS_RGBA)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_DIFF);
	if(!(passflag & SCE_PASS_DIFFUSE)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_SPEC);
	if(!(passflag & SCE_PASS_SPEC)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_SHADOW);
	if(!(passflag & SCE_PASS_SHADOW)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_AO);
	if(!(passflag & SCE_PASS_AO)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_REFLECT);
	if(!(passflag & SCE_PASS_REFLECT)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_REFRACT);
	if(!(passflag & SCE_PASS_REFRACT)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_INDIRECT);
	if(!(passflag & SCE_PASS_INDIRECT)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_INDEXOB);
	if(!(passflag & SCE_PASS_INDEXOB)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_INDEXMA);
	if(!(passflag & SCE_PASS_INDEXMA)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_MIST);
	if(!(passflag & SCE_PASS_MIST)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_EMIT);
	if(!(passflag & SCE_PASS_EMIT)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_ENV);
	if(!(passflag & SCE_PASS_ENVIRONMENT)) sock->flag |= SOCK_UNAVAIL;

	sock= BLI_findlink(&node->outputs, RRES_OUT_DIFF_DIRECT);
	if(!(passflag & SCE_PASS_DIFFUSE_DIRECT)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_DIFF_INDIRECT);
	if(!(passflag & SCE_PASS_DIFFUSE_INDIRECT)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_DIFF_COLOR);
	if(!(passflag & SCE_PASS_DIFFUSE_COLOR)) sock->flag |= SOCK_UNAVAIL;

	sock= BLI_findlink(&node->outputs, RRES_OUT_GLOSSY_DIRECT);
	if(!(passflag & SCE_PASS_GLOSSY_DIRECT)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_GLOSSY_INDIRECT);
	if(!(passflag & SCE_PASS_GLOSSY_INDIRECT)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_GLOSSY_COLOR);
	if(!(passflag & SCE_PASS_GLOSSY_COLOR)) sock->flag |= SOCK_UNAVAIL;

	sock= BLI_findlink(&node->outputs, RRES_OUT_TRANSM_DIRECT);
	if(!(passflag & SCE_PASS_TRANSM_DIRECT)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_TRANSM_INDIRECT);
	if(!(passflag & SCE_PASS_TRANSM_INDIRECT)) sock->flag |= SOCK_UNAVAIL;
	sock= BLI_findlink(&node->outputs, RRES_OUT_TRANSM_COLOR);
	if(!(passflag & SCE_PASS_TRANSM_COLOR)) sock->flag |= SOCK_UNAVAIL;
}

/* based on rules, force sockets hidden always */
void ntreeCompositForceHidden(bNodeTree *ntree, Scene *curscene)
{
	if(ntree==NULL) return;

	for(node= ntree->nodes.first; node; node= node->next) {
		if(node->type==CMP_NODE_R_LAYERS) {
			Scene *sce= node->id?(Scene *)node->id:curscene;
			SceneRenderLayer *srl= BLI_findlink(&sce->r.layers, node->custom1);
			if(srl)
				force_hidden_passes(node, srl->passflag);
		}
		else if(node->type==CMP_NODE_IMAGE) {
			Image *ima= (Image *)node->id;
				ImageUser *iuser= node->storage;
				RenderLayer *rl= BLI_findlink(&ima->rr->layers, iuser->layer);
				if(rl)
					force_hidden_passes(node, rl->passflag);
				else
					force_hidden_passes(node, RRES_OUT_IMAGE|RRES_OUT_ALPHA);

			else if(ima->type!=IMA_TYPE_MULTILAYER) {	/* if ->rr not yet read we keep inputs */
				force_hidden_passes(node, RRES_OUT_IMAGE|RRES_OUT_ALPHA|RRES_OUT_Z);

			force_hidden_passes(node, RRES_OUT_IMAGE|RRES_OUT_ALPHA);

			force_hidden_passes(node, RRES_OUT_IMAGE|RRES_OUT_ALPHA);

/* called from render pipeline, to tag render input and output */
/* need to do all scenes, to prevent errors when you re-render 1 scene */
void ntreeCompositTagRender(Scene *curscene)
{
	for(sce= G.main->scene.first; sce; sce= sce->id.next) {

		for(node= sce->nodetree->nodes.first; node; node= node->next) {
			if(node->id==(ID *)curscene || node->type==CMP_NODE_COMPOSITE)
				nodeUpdate(sce->nodetree, node);
			else if(node->type==CMP_NODE_TEXTURE) /* uses scene sizex/sizey */
				nodeUpdate(sce->nodetree, node);
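
/* check whether any of the node's RNA properties or input socket default values are
 * animated (have an FCurve or driver); animated nodes are tagged for re-execution via
 * nodeUpdate() */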
static int node_animation_properties(bNodeTree *ntree, bNode *node)
{
	/* check to see if any of the node's properties have fcurves */
	RNA_pointer_create((ID *)ntree, &RNA_Node, node, &ptr);
	lb = RNA_struct_type_properties(ptr.type);

	for (link=lb->first; link; link=link->next) {
		int driven, len=1, index;
		prop = (PropertyRNA *)link;

		if (RNA_property_array_check(prop))
			len = RNA_property_array_length(&ptr, prop);

		for (index=0; index<len; index++) {
			if (rna_get_fcurve(&ptr, prop, index, NULL, &driven)) {
				nodeUpdate(ntree, node);

	/* now check node sockets */
	for (sock = node->inputs.first; sock; sock=sock->next) {
		int driven, len=1, index;

		RNA_pointer_create((ID *)ntree, &RNA_NodeSocket, sock, &ptr);
		prop = RNA_struct_find_property(&ptr, "default_value");

		if (RNA_property_array_check(prop))
			len = RNA_property_array_length(&ptr, prop);

		for (index=0; index<len; index++) {
			if (rna_get_fcurve(&ptr, prop, index, NULL, &driven)) {
				nodeUpdate(ntree, node);

/* tags nodes that have animation capabilities */
int ntreeCompositTagAnimated(bNodeTree *ntree)
{
	if(ntree==NULL) return 0;

	for(node= ntree->nodes.first; node; node= node->next) {

		tagged = node_animation_properties(ntree, node);

		/* otherwise always tag these node types */
		if(node->type==CMP_NODE_IMAGE) {
			Image *ima= (Image *)node->id;
			if(ima && ELEM(ima->source, IMA_SRC_MOVIE, IMA_SRC_SEQUENCE)) {
				nodeUpdate(ntree, node);

		else if(node->type==CMP_NODE_TIME) {
			nodeUpdate(ntree, node);

		/* here was tag render layer, but this is called after a render, so re-composites fail */
		else if(node->type==NODE_GROUP) {
			if( ntreeCompositTagAnimated((bNodeTree *)node->id) ) {
				nodeUpdate(ntree, node);

		else if(ELEM(node->type, CMP_NODE_MOVIECLIP, CMP_NODE_TRANSFORM)) {
			nodeUpdate(ntree, node);

/* called from image window preview */
void ntreeCompositTagGenerators(bNodeTree *ntree)
{
	if(ntree==NULL) return;

	for(node= ntree->nodes.first; node; node= node->next) {
		if(ELEM(node->type, CMP_NODE_R_LAYERS, CMP_NODE_IMAGE))
			nodeUpdate(ntree, node);

/* XXX after render animation system gets a refresh, this call allows composite to end clean */
void ntreeCompositClearTags(bNodeTree *ntree)
{
	if(ntree==NULL) return;

	for(node= ntree->nodes.first; node; node= node->next) {

		if(node->type==NODE_GROUP)
			ntreeCompositClearTags((bNodeTree *)node->id);