diff --git a/openpype/hosts/nuke/api/__init__.py b/openpype/hosts/nuke/api/__init__.py index c65058874b2..3b00ca9f6f1 100644 --- a/openpype/hosts/nuke/api/__init__.py +++ b/openpype/hosts/nuke/api/__init__.py @@ -6,18 +6,26 @@ current_file, work_root, ) - from .command import ( viewer_update_and_undo_stop ) - -from .plugin import OpenPypeCreator +from .plugin import ( + NukeCreator, + NukeWriteCreator, + NukeCreatorError, + OpenPypeCreator, + get_instance_group_node_childs, + get_colorspace_from_node +) from .pipeline import ( - install, - uninstall, + NukeHost, ls, + list_instances, + remove_instance, + select_instance, + containerise, parse_container, update_container, @@ -25,13 +33,19 @@ get_workfile_build_placeholder_plugins, ) from .lib import ( + INSTANCE_DATA_KNOB, + ROOT_DATA_KNOB, maintained_selection, reset_selection, + select_nodes, get_view_process_node, duplicate_node, - convert_knob_value_to_correct_type + convert_knob_value_to_correct_type, + get_node_data, + set_node_data, + update_node_data, + create_write_node ) - from .utils import ( colorspace_exists_on_node, get_colorspace_list @@ -47,23 +61,38 @@ "viewer_update_and_undo_stop", + "NukeCreator", + "NukeWriteCreator", + "NukeCreatorError", "OpenPypeCreator", - "install", - "uninstall", + "NukeHost", + "get_instance_group_node_childs", + "get_colorspace_from_node", "ls", + "list_instances", + "remove_instance", + "select_instance", + "containerise", "parse_container", "update_container", "get_workfile_build_placeholder_plugins", + "INSTANCE_DATA_KNOB", + "ROOT_DATA_KNOB", "maintained_selection", "reset_selection", + "select_nodes", "get_view_process_node", "duplicate_node", "convert_knob_value_to_correct_type", + "get_node_data", + "set_node_data", + "update_node_data", + "create_write_node", "colorspace_exists_on_node", "get_colorspace_list" diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 2fdf446357d..f2872675ca2 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -1,14 +1,15 @@ import os from pprint import pformat import re +import json import six +import functools +import warnings import platform import tempfile import contextlib from collections import OrderedDict -import clique - import nuke from qtpy import QtCore, QtWidgets @@ -64,6 +65,54 @@ 26, # Text Knob (But for backward compatibility, still be read # if value is not an empty string.) ) +JSON_PREFIX = "JSON:::" +ROOT_DATA_KNOB = "publish_context" +INSTANCE_DATA_KNOB = "publish_instance" + + +class DeprecatedWarning(DeprecationWarning): + pass + + +def deprecated(new_destination): + """Mark functions as deprecated. + + It will result in a warning being emitted when the function is used. + """ + + func = None + if callable(new_destination): + func = new_destination + new_destination = None + + def _decorator(decorated_func): + if new_destination is None: + warning_message = ( + " Please check content of deprecated function to figure out" + " possible replacement." 
+ ) + else: + warning_message = " Please replace your usage with '{}'.".format( + new_destination + ) + + @functools.wraps(decorated_func) + def wrapper(*args, **kwargs): + warnings.simplefilter("always", DeprecatedWarning) + warnings.warn( + ( + "Call to deprecated function '{}'" + "\nFunction was moved or removed.{}" + ).format(decorated_func.__name__, warning_message), + category=DeprecatedWarning, + stacklevel=4 + ) + return decorated_func(*args, **kwargs) + return wrapper + + if func is None: + return _decorator + return _decorator(func) class Context: @@ -94,8 +143,78 @@ def get_main_window(): return Context.main_window +def set_node_data(node, knobname, data): + """Write data to node invisible knob + + Will create new in case it doesnt exists + or update the one already created. + + Args: + node (nuke.Node): node object + knobname (str): knob name + data (dict): data to be stored in knob + """ + # if exists then update data + if knobname in node.knobs(): + log.debug("Updating knobname `{}` on node `{}`".format( + knobname, node.name() + )) + update_node_data(node, knobname, data) + return + + log.debug("Creating knobname `{}` on node `{}`".format( + knobname, node.name() + )) + # else create new + knob_value = JSON_PREFIX + json.dumps(data) + knob = nuke.String_Knob(knobname) + knob.setValue(knob_value) + knob.setFlag(nuke.INVISIBLE) + node.addKnob(knob) + + +def get_node_data(node, knobname): + """Read data from node. + + Args: + node (nuke.Node): node object + knobname (str): knob name + + Returns: + dict: data stored in knob + """ + if knobname not in node.knobs(): + return + + rawdata = node[knobname].getValue() + if ( + isinstance(rawdata, six.string_types) + and rawdata.startswith(JSON_PREFIX) + ): + try: + return json.loads(rawdata[len(JSON_PREFIX):]) + except json.JSONDecodeError: + return + + +def update_node_data(node, knobname, data): + """Update already present data. + + Args: + node (nuke.Node): node object + knobname (str): knob name + data (dict): data to update knob value + """ + knob = node[knobname] + node_data = get_node_data(node, knobname) or {} + node_data.update(data) + knob_value = JSON_PREFIX + json.dumps(node_data) + knob.setValue(knob_value) + + class Knobby(object): - """For creating knob which it's type isn't mapped in `create_knobs` + """[DEPRICATED] For creating knob which it's type isn't + mapped in `create_knobs` Args: type (string): Nuke knob type name @@ -120,9 +239,15 @@ def create(self, name, nice=None): knob.setFlag(flag) return knob + @staticmethod + def nice_naming(key): + """Convert camelCase name into UI Display Name""" + words = re.findall('[A-Z][^A-Z]*', key[0].upper() + key[1:]) + return " ".join(words) + def create_knobs(data, tab=None): - """Create knobs by data + """[DEPRICATED] Create knobs by data Depending on the type of each dict value and creates the correct Knob. @@ -216,7 +341,7 @@ def nice_naming(key): def imprint(node, data, tab=None): - """Store attributes with value on node + """[DEPRICATED] Store attributes with value on node Parse user data into Node knobs. Use `collections.OrderedDict` to ensure knob order. 
@@ -272,7 +397,7 @@ def imprint(node, data, tab=None): def add_publish_knob(node): - """Add Publish knob to node + """[DEPRICATED] Add Publish knob to node Arguments: node (nuke.Node): nuke node to be processed @@ -290,7 +415,7 @@ def add_publish_knob(node): def set_avalon_knob_data(node, data=None, prefix="avalon:"): - """ Sets data into nodes's avalon knob + """[DEPRICATED] Sets data into nodes's avalon knob Arguments: node (nuke.Node): Nuke node to imprint with data, @@ -351,8 +476,8 @@ def set_avalon_knob_data(node, data=None, prefix="avalon:"): return node -def get_avalon_knob_data(node, prefix="avalon:"): - """ Gets a data from nodes's avalon knob +def get_avalon_knob_data(node, prefix="avalon:", create=True): + """[DEPRICATED] Gets a data from nodes's avalon knob Arguments: node (obj): Nuke node to search for data, @@ -380,8 +505,11 @@ def get_avalon_knob_data(node, prefix="avalon:"): except NameError as e: # if it doesn't then create it log.debug("Creating avalon knob: `{}`".format(e)) - node = set_avalon_knob_data(node) - return get_avalon_knob_data(node) + if create: + node = set_avalon_knob_data(node) + return get_avalon_knob_data(node) + else: + return {} # get data from filtered knobs data.update({k.replace(p, ''): node[k].value() @@ -392,7 +520,7 @@ def get_avalon_knob_data(node, prefix="avalon:"): def fix_data_for_node_create(data): - """Fixing data to be used for nuke knobs + """[DEPRICATED] Fixing data to be used for nuke knobs """ for k, v in data.items(): if isinstance(v, six.text_type): @@ -403,7 +531,7 @@ def fix_data_for_node_create(data): def add_write_node_legacy(name, **kwarg): - """Adding nuke write node + """[DEPRICATED] Adding nuke write node Arguments: name (str): nuke node name kwarg (attrs): data for nuke knobs @@ -566,15 +694,16 @@ def get_nuke_imageio_settings(): Context.project_name)["nuke"]["imageio"] # backward compatibility for project started before 3.10 - # those are still having `__legacy__` knob types + # those are still having `__legacy__` knob types. if not project_imageio["enabled"]: return get_anatomy_settings(Context.project_name)["imageio"]["nuke"] return get_project_settings(Context.project_name)["nuke"]["imageio"] +@deprecated("openpype.hosts.nuke.api.lib.get_nuke_imageio_settings") def get_created_node_imageio_setting_legacy(nodeclass, creator, subset): - ''' Get preset data for dataflow (fileType, compression, bitDepth) + '''[DEPRICATED] Get preset data for dataflow (fileType, compression, bitDepth) ''' assert any([creator, nodeclass]), nuke.message( @@ -971,27 +1100,14 @@ def format_anatomy(data): Return: path (str) ''' - # TODO: perhaps should be nonPublic - anatomy = Anatomy() log.debug("__ anatomy.templates: {}".format(anatomy.templates)) - try: - # TODO: bck compatibility with old anatomy template - padding = int( - anatomy.templates["render"].get( - "frame_padding", - anatomy.templates["render"].get("padding") - ) + padding = int( + anatomy.templates["render"].get( + "frame_padding" ) - except KeyError as e: - msg = ("`padding` key is not in `render` " - "or `frame_padding` on is not available in " - "Anatomy template. 
Please, add it there and restart " - "the pipeline (padding: \"4\"): `{}`").format(e) - - log.error(msg) - nuke.message(msg) + ) version = data.get("version", None) if not version: @@ -999,16 +1115,16 @@ def format_anatomy(data): data["version"] = get_version_from_path(file) project_name = anatomy.project_name - asset_name = data["avalon"]["asset"] - task_name = os.environ["AVALON_TASK"] + asset_name = data["asset"] + task_name = data["task"] host_name = os.environ["AVALON_APP"] context_data = get_template_data_with_names( project_name, asset_name, task_name, host_name ) data.update(context_data) data.update({ - "subset": data["avalon"]["subset"], - "family": data["avalon"]["family"], + "subset": data["subset"], + "family": data["family"], "frame": "#" * padding, }) return anatomy.format(data) @@ -1100,8 +1216,6 @@ def create_write_node( data, input=None, prenodes=None, - review=True, - farm=True, linked_knobs=None, **kwargs ): @@ -1143,35 +1257,26 @@ def create_write_node( ''' prenodes = prenodes or {} - # group node knob overrides - knob_overrides = data.pop("knobs", []) - # filtering variables plugin_name = data["creator"] subset = data["subset"] # get knob settings for write node imageio_writes = get_imageio_node_setting( - node_class=data["nodeclass"], + node_class="Write", plugin_name=plugin_name, subset=subset ) for knob in imageio_writes["knobs"]: if knob["name"] == "file_type": - representation = knob["value"] + ext = knob["value"] - try: - data.update({ - "imageio_writes": imageio_writes, - "representation": representation, - }) - anatomy_filled = format_anatomy(data) - - except Exception as e: - msg = "problem with resolving anatomy template: {}".format(e) - log.error(msg) - nuke.message(msg) + data.update({ + "imageio_writes": imageio_writes, + "ext": ext + }) + anatomy_filled = format_anatomy(data) # build file path to workfiles fdir = str(anatomy_filled["work"]["folder"]).replace("\\", "/") @@ -1180,7 +1285,7 @@ def create_write_node( version=data["version"], subset=data["subset"], frame=data["frame"], - ext=representation + ext=ext ) # create directory @@ -1234,14 +1339,6 @@ def create_write_node( # connect to previous node now_node.setInput(0, prev_node) - # imprinting group node - set_avalon_knob_data(GN, data["avalon"]) - add_publish_knob(GN) - add_rendering_knobs(GN, farm) - - if review: - add_review_knob(GN) - # add divider GN.addKnob(nuke.Text_Knob('', 'Rendering')) @@ -1287,11 +1384,7 @@ def create_write_node( # adding write to read button add_button_clear_rendered(GN, os.path.dirname(fpath)) - # Deadline tab. 
- add_deadline_tab(GN) - - # open the our Tab as default - GN[_NODE_TAB_NAME].setFlag(0) + GN.addKnob(nuke.Text_Knob('', '')) # set tile color tile_color = next( @@ -1303,12 +1396,10 @@ def create_write_node( GN["tile_color"].setValue( color_gui_to_int(tile_color)) - # finally add knob overrides - set_node_knobs_from_settings(GN, knob_overrides, **kwargs) - return GN +@deprecated("openpype.hosts.nuke.api.lib.create_write_node") def create_write_node_legacy( name, data, input=None, prenodes=None, review=True, linked_knobs=None, farm=True @@ -1599,6 +1690,13 @@ def set_node_knobs_from_settings(node, knob_settings, **kwargs): if knob_name not in node.knobs(): continue + if knob_type == "expression": + knob_expression = knob["expression"] + node[knob_name].setExpression( + knob_expression + ) + continue + # first deal with formatable knob settings if knob_type == "formatable": template = knob["template"] @@ -1607,7 +1705,6 @@ def set_node_knobs_from_settings(node, knob_settings, **kwargs): _knob_value = template.format( **kwargs ) - log.debug("__ knob_value0: {}".format(_knob_value)) except KeyError as msg: log.warning("__ msg: {}".format(msg)) raise KeyError(msg) @@ -1661,6 +1758,7 @@ def color_gui_to_int(color_gui): return int(hex_value, 16) +@deprecated def add_rendering_knobs(node, farm=True): ''' Adds additional rendering knobs to given node @@ -1681,6 +1779,7 @@ def add_rendering_knobs(node, farm=True): return node +@deprecated def add_review_knob(node): ''' Adds additional review knob to given node @@ -1697,7 +1796,9 @@ def add_review_knob(node): return node +@deprecated def add_deadline_tab(node): + # TODO: remove this as it is only linked to legacy create node.addKnob(nuke.Tab_Knob("Deadline")) knob = nuke.Int_Knob("deadlinePriority", "Priority") @@ -1723,7 +1824,10 @@ def add_deadline_tab(node): node.addKnob(knob) +@deprecated def get_deadline_knob_names(): + # TODO: remove this as it is only linked to legacy + # validate_write_deadline_tab return [ "Deadline", "deadlineChunkSize", @@ -2137,7 +2241,8 @@ def reset_frame_range_handles(self): range = '{0}-{1}'.format( int(data["frameStart"]), - int(data["frameEnd"])) + int(data["frameEnd"]) + ) for node in nuke.allNodes(filter="Viewer"): node['frame_range'].setValue(range) @@ -2145,12 +2250,14 @@ def reset_frame_range_handles(self): node['frame_range'].setValue(range) node['frame_range_lock'].setValue(True) - # adding handle_start/end to root avalon knob - if not set_avalon_knob_data(self._root_node, { - "handleStart": int(handle_start), - "handleEnd": int(handle_end) - }): - log.warning("Cannot set Avalon knob to Root node!") + set_node_data( + self._root_node, + INSTANCE_DATA_KNOB, + { + "handleStart": int(handle_start), + "handleEnd": int(handle_end) + } + ) def reset_resolution(self): """Set resolution to project resolution.""" @@ -2264,29 +2371,25 @@ def get_write_node_template_attr(node): ''' Gets all defined data from presets ''' + + # TODO: add identifiers to settings and rename settings key + plugin_names_mapping = { + "create_write_image": "CreateWriteImage", + "create_write_prerender": "CreateWritePrerender", + "create_write_render": "CreateWriteRender" + } # get avalon data from node - avalon_knob_data = read_avalon_data(node) - # get template data - nuke_imageio_writes = get_imageio_node_setting( - node_class=avalon_knob_data["families"], - plugin_name=avalon_knob_data["creator"], - subset=avalon_knob_data["subset"] + node_data = get_node_data(node, INSTANCE_DATA_KNOB) + identifier = node_data["creator_identifier"] + + # 
return template data + return get_imageio_node_setting( + node_class="Write", + plugin_name=plugin_names_mapping[identifier], + subset=node_data["subset"] ) - # collecting correct data - correct_data = OrderedDict() - - # adding imageio knob presets - for k, v in nuke_imageio_writes.items(): - if k in ["_id", "_previous"]: - continue - correct_data[k] = v - - # fix badly encoded data - return fix_data_for_node_create(correct_data) - - def get_dependent_nodes(nodes): """Get all dependent nodes connected to the list of nodes. @@ -2325,10 +2428,11 @@ def get_dependent_nodes(nodes): def find_free_space_to_paste_nodes( - nodes, - group=nuke.root(), - direction="right", - offset=300): + nodes, + group=nuke.root(), + direction="right", + offset=300 +): """ For getting coordinates in DAG (node graph) for placing new nodes @@ -2554,6 +2658,7 @@ def process_workfile_builder(): open_file(last_workfile_path) +@deprecated def recreate_instance(origin_node, avalon_data=None): """Recreate input instance to different data @@ -2619,6 +2724,32 @@ def recreate_instance(origin_node, avalon_data=None): return new_node +def add_scripts_menu(): + try: + from scriptsmenu import launchfornuke + except ImportError: + log.warning( + "Skipping studio.menu install, because " + "'scriptsmenu' module seems unavailable." + ) + return + + # load configuration of custom menu + project_settings = get_project_settings(os.getenv("AVALON_PROJECT")) + config = project_settings["nuke"]["scriptsmenu"]["definition"] + _menu = project_settings["nuke"]["scriptsmenu"]["name"] + + if not config: + log.warning("Skipping studio menu, no definition found.") + return + + # run the launcher for Maya menu + studio_menu = launchfornuke.main(title=_menu.title()) + + # apply configuration + studio_menu.build_from_configuration(studio_menu, config) + + def add_scripts_gizmo(): # load configuration of custom menu @@ -2799,48 +2930,6 @@ def dirmap_file_name_filter(file_name): return file_name -# ------------------------------------ -# This function seems to be deprecated -# ------------------------------------ -def ls_img_sequence(path): - """Listing all available coherent image sequence from path - - Arguments: - path (str): A nuke's node object - - Returns: - data (dict): with nuke formated path and frameranges - """ - file = os.path.basename(path) - dirpath = os.path.dirname(path) - base, ext = os.path.splitext(file) - name, padding = os.path.splitext(base) - - # populate list of files - files = [ - f for f in os.listdir(dirpath) - if name in f - if ext in f - ] - - # create collection from list of files - collections, reminder = clique.assemble(files) - - if len(collections) > 0: - head = collections[0].format("{head}") - padding = collections[0].format("{padding}") % 1 - padding = "#" * len(padding) - tail = collections[0].format("{tail}") - file = head + padding + tail - - return { - "path": os.path.join(dirpath, file).replace("\\", "/"), - "frames": collections[0].format("[{ranges}]") - } - - return False - - def get_group_io_nodes(nodes): """Get the input and the output of a group of nodes.""" diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index bdf12b7dc49..6dec60d81af 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -1,21 +1,24 @@ +import nuke + import os import importlib from collections import OrderedDict -import nuke - import pyblish.api import openpype +from openpype.host import ( + HostBase, + IWorkfileHost, + ILoadHost, + IPublishHost +) from 
openpype.settings import get_current_project_settings from openpype.lib import register_event_callback, Logger from openpype.pipeline import ( register_loader_plugin_path, register_creator_plugin_path, register_inventory_action_path, - deregister_loader_plugin_path, - deregister_creator_plugin_path, - deregister_inventory_action_path, AVALON_CONTAINER_ID, ) from openpype.pipeline.workfile import BuildWorkfile @@ -24,6 +27,8 @@ from .command import viewer_update_and_undo_stop from .lib import ( Context, + ROOT_DATA_KNOB, + INSTANCE_DATA_KNOB, get_main_window, add_publish_knob, WorkfileSettings, @@ -32,6 +37,12 @@ check_inventory_versions, set_avalon_knob_data, read_avalon_data, + on_script_load, + dirmap_file_name_filter, + add_scripts_menu, + add_scripts_gizmo, + get_node_data, + set_node_data ) from .workfile_template_builder import ( NukePlaceholderLoadPlugin, @@ -41,6 +52,14 @@ create_placeholder, update_placeholder, ) +from .workio import ( + open_file, + save_file, + file_extensions, + has_unsaved_changes, + work_root, + current_file +) log = Logger.get_logger(__name__) @@ -59,6 +78,95 @@ pyblish.api.register_gui(os.getenv("PYBLISH_GUI", None)) +class NukeHost( + HostBase, IWorkfileHost, ILoadHost, IPublishHost +): + name = "nuke" + + def open_workfile(self, filepath): + return open_file(filepath) + + def save_workfile(self, filepath=None): + return save_file(filepath) + + def work_root(self, session): + return work_root(session) + + def get_current_workfile(self): + return current_file() + + def workfile_has_unsaved_changes(self): + return has_unsaved_changes() + + def get_workfile_extensions(self): + return file_extensions() + + def get_containers(self): + return ls() + + def install(self): + ''' Installing all requarements for Nuke host + ''' + + pyblish.api.register_host("nuke") + + self.log.info("Registering Nuke plug-ins..") + pyblish.api.register_plugin_path(PUBLISH_PATH) + register_loader_plugin_path(LOAD_PATH) + register_creator_plugin_path(CREATE_PATH) + register_inventory_action_path(INVENTORY_PATH) + + # Register Avalon event for workfiles loading. + register_event_callback("workio.open_file", check_inventory_versions) + register_event_callback("taskChanged", change_context_label) + + pyblish.api.register_callback( + "instanceToggled", on_pyblish_instance_toggled) + + _install_menu() + + # add script menu + add_scripts_menu() + add_scripts_gizmo() + + add_nuke_callbacks() + + launch_workfiles_app() + + def get_context_data(self): + root_node = nuke.root() + return get_node_data(root_node, ROOT_DATA_KNOB) + + def update_context_data(self, data, changes): + root_node = nuke.root() + set_node_data(root_node, ROOT_DATA_KNOB, data) + + +def add_nuke_callbacks(): + """ Adding all available nuke callbacks + """ + workfile_settings = WorkfileSettings() + # Set context settings. 
+ nuke.addOnCreate( + workfile_settings.set_context_settings, nodeClass="Root") + nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root") + nuke.addOnCreate(process_workfile_builder, nodeClass="Root") + + # fix ffmpeg settings on script + nuke.addOnScriptLoad(on_script_load) + + # set checker for last versions on loaded containers + nuke.addOnScriptLoad(check_inventory_versions) + nuke.addOnScriptSave(check_inventory_versions) + + # # set apply all workfile settings on script load and save + nuke.addOnScriptLoad(WorkfileSettings().set_context_settings) + + nuke.addFilenameFilter(dirmap_file_name_filter) + + log.info("Added Nuke callbacks ...") + + def reload_config(): """Attempt to reload pipeline at run-time. @@ -84,52 +192,6 @@ def reload_config(): reload(module) -def install(): - ''' Installing all requarements for Nuke host - ''' - - pyblish.api.register_host("nuke") - - log.info("Registering Nuke plug-ins..") - pyblish.api.register_plugin_path(PUBLISH_PATH) - register_loader_plugin_path(LOAD_PATH) - register_creator_plugin_path(CREATE_PATH) - register_inventory_action_path(INVENTORY_PATH) - - # Register Avalon event for workfiles loading. - register_event_callback("workio.open_file", check_inventory_versions) - register_event_callback("taskChanged", change_context_label) - - pyblish.api.register_callback( - "instanceToggled", on_pyblish_instance_toggled) - workfile_settings = WorkfileSettings() - - # Set context settings. - nuke.addOnCreate(workfile_settings.set_context_settings, nodeClass="Root") - nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root") - nuke.addOnCreate(process_workfile_builder, nodeClass="Root") - - _install_menu() - launch_workfiles_app() - - -def uninstall(): - '''Uninstalling host's integration - ''' - log.info("Deregistering Nuke plug-ins..") - pyblish.deregister_host("nuke") - pyblish.api.deregister_plugin_path(PUBLISH_PATH) - deregister_loader_plugin_path(LOAD_PATH) - deregister_creator_plugin_path(CREATE_PATH) - deregister_inventory_action_path(INVENTORY_PATH) - - pyblish.api.deregister_callback( - "instanceToggled", on_pyblish_instance_toggled) - - reload_config() - _uninstall_menu() - - def _show_workfiles(): # Make sure parent is not set # - this makes Workfiles tool as separated window which @@ -167,7 +229,15 @@ def _install_menu(): menu.addSeparator() menu.addCommand( "Create...", - lambda: host_tools.show_creator(parent=main_window) + lambda: host_tools.show_publisher( + tab="create" + ) + ) + menu.addCommand( + "Publish...", + lambda: host_tools.show_publisher( + tab="publish" + ) ) menu.addCommand( "Load...", @@ -176,14 +246,11 @@ def _install_menu(): use_context=True ) ) - menu.addCommand( - "Publish...", - lambda: host_tools.show_publish(parent=main_window) - ) menu.addCommand( "Manage...", lambda: host_tools.show_scene_inventory(parent=main_window) ) + menu.addSeparator() menu.addCommand( "Library...", lambda: host_tools.show_library_loader( @@ -233,7 +300,7 @@ def _install_menu(): "Experimental tools...", lambda: host_tools.show_experimental_tools_dialog(parent=main_window) ) - + menu.addSeparator() # add reload pipeline only in debug mode if bool(os.getenv("NUKE_DEBUG")): menu.addSeparator() @@ -243,15 +310,6 @@ def _install_menu(): add_shortcuts_from_presets() -def _uninstall_menu(): - menubar = nuke.menu("Nuke") - menu = menubar.findItem(MENU_LABEL) - - for item in menu.items(): - log.info("Removing menu item: {}".format(item.name())) - menu.removeItem(item.name()) - - def change_context_label(): menubar = 
nuke.menu("Nuke") menu = menubar.findItem(MENU_LABEL) @@ -283,8 +341,8 @@ def add_shortcuts_from_presets(): if nuke_presets.get("menu"): menu_label_mapping = { - "manage": "Manage...", "create": "Create...", + "manage": "Manage...", "load": "Load...", "build_workfile": "Build Workfile", "publish": "Publish..." @@ -302,7 +360,7 @@ def add_shortcuts_from_presets(): item_label = menu_label_mapping[command_name] menuitem = menu.findItem(item_label) menuitem.setShortcut(shortcut_str) - except AttributeError as e: + except (AttributeError, KeyError) as e: log.error(e) @@ -434,11 +492,72 @@ def ls(): """ all_nodes = nuke.allNodes(recurseGroups=False) - # TODO: add readgeo, readcamera, readimage nodes = [n for n in all_nodes] for n in nodes: - log.debug("name: `{}`".format(n.name())) container = parse_container(n) if container: yield container + + +def list_instances(creator_id=None): + """List all created instances to publish from current workfile. + + For SubsetManager + + Returns: + (list) of dictionaries matching instances format + """ + listed_instances = [] + for node in nuke.allNodes(recurseGroups=True): + + if node.Class() in ["Viewer", "Dot"]: + continue + + try: + if node["disable"].value(): + continue + except NameError: + # pass if disable knob doesn't exist + pass + + # get data from avalon knob + instance_data = get_node_data( + node, INSTANCE_DATA_KNOB) + + if not instance_data: + continue + + if instance_data["id"] != "pyblish.avalon.instance": + continue + + if creator_id and instance_data["creator_identifier"] != creator_id: + continue + + listed_instances.append((node, instance_data)) + + return listed_instances + + +def remove_instance(instance): + """Remove instance from current workfile metadata. + + For SubsetManager + + Args: + instance (dict): instance representation from subsetmanager model + """ + instance_node = instance.transient_data["node"] + instance_knob = instance_node.knobs()[INSTANCE_DATA_KNOB] + instance_node.removeKnob(instance_knob) + + +def select_instance(instance): + """ + Select instance in Node View + + Args: + instance (dict): instance representation from subsetmanager model + """ + instance_node = instance.transient_data["node"] + instance_node["selected"].setValue(True) diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index b213b896092..abf9144c5b2 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -1,27 +1,373 @@ +import nuke +import re import os +import sys +import six import random import string -from collections import OrderedDict +from collections import OrderedDict, defaultdict from abc import abstractmethod -import nuke +from openpype.client import ( + get_asset_by_name, + get_subsets, +) from openpype.settings import get_current_project_settings +from openpype.lib import ( + BoolDef, + EnumDef +) from openpype.pipeline import ( LegacyCreator, LoaderPlugin, + CreatorError, + Creator as NewCreator, + CreatedInstance, + legacy_io ) from .lib import ( + INSTANCE_DATA_KNOB, Knobby, check_subsetname_exists, maintained_selection, + get_avalon_knob_data, set_avalon_knob_data, add_publish_knob, get_nuke_imageio_settings, set_node_knobs_from_settings, + set_node_data, + get_node_data, get_view_process_node, - get_viewer_config_from_string + get_viewer_config_from_string, + deprecated ) +from .pipeline import ( + list_instances, + remove_instance +) + + +def _collect_and_cache_nodes(creator): + key = "openpype.nuke.nodes" + if key not in creator.collection_shared_data: + 
instances_by_identifier = defaultdict(list) + for item in list_instances(): + _, instance_data = item + identifier = instance_data["creator_identifier"] + instances_by_identifier[identifier].append(item) + creator.collection_shared_data[key] = instances_by_identifier + return creator.collection_shared_data[key] + + +class NukeCreatorError(CreatorError): + pass + + +class NukeCreator(NewCreator): + selected_nodes = [] + + def pass_pre_attributes_to_instance( + self, + instance_data, + pre_create_data, + keys=None + ): + if not keys: + keys = pre_create_data.keys() + + creator_attrs = instance_data["creator_attributes"] = {} + for pass_key in keys: + creator_attrs[pass_key] = pre_create_data[pass_key] + + def add_info_knob(self, node): + if "OP_info" in node.knobs().keys(): + return + + # add info text + info_knob = nuke.Text_Knob("OP_info", "") + info_knob.setValue(""" + +

+ This node is maintained by OpenPype Publisher.
+
+ To remove it use Publisher gui.
+
+ """) + node.addKnob(info_knob) + + def check_existing_subset(self, subset_name, instance_data): + """Check if existing subset name already exists.""" + exists = False + for node in nuke.allNodes(recurseGroups=True): + node_data = get_node_data(node, INSTANCE_DATA_KNOB) + if subset_name in node_data.get("subset"): + exists = True + + return exists + + def create_instance_node( + self, + node_name, + knobs=None, + parent=None, + node_type=None + ): + """Create node representing instance. + + Arguments: + node_name (str): Name of the new node. + knobs (OrderedDict): node knobs name and values + parent (str): Name of the parent node. + node_type (str, optional): Nuke node Class. + + Returns: + nuke.Node: Newly created instance node. + + """ + node_type = node_type or "NoOp" + + node_knobs = knobs or {} + + # set parent node + parent_node = nuke.root() + if parent: + parent_node = nuke.toNode(parent) + + try: + with parent_node: + created_node = nuke.createNode(node_type) + created_node["name"].setValue(node_name) + + self.add_info_knob(created_node) + + for key, values in node_knobs.items(): + if key in created_node.knobs(): + created_node["key"].setValue(values) + except Exception as _err: + raise NukeCreatorError("Creating have failed: {}".format(_err)) + + return created_node + + def set_selected_nodes(self, pre_create_data): + if pre_create_data.get("use_selection"): + self.selected_nodes = nuke.selectedNodes() + if self.selected_nodes == []: + raise NukeCreatorError("Creator error: No active selection") + else: + self.selected_nodes = [] + + def create(self, subset_name, instance_data, pre_create_data): + + # make sure selected nodes are added + self.set_selected_nodes(pre_create_data) + + # make sure subset name is unique + if self.check_existing_subset(subset_name, instance_data): + raise NukeCreatorError( + ("subset {} is already published" + "definition.").format(subset_name)) + + try: + instance_node = self.create_instance_node( + subset_name, + node_type=instance_data.pop("node_type", None) + ) + instance = CreatedInstance( + self.family, + subset_name, + instance_data, + self + ) + + instance.transient_data["node"] = instance_node + + self._add_instance_to_context(instance) + + set_node_data( + instance_node, INSTANCE_DATA_KNOB, instance.data_to_store()) + + return instance + + except Exception as er: + six.reraise( + NukeCreatorError, + NukeCreatorError("Creator error: {}".format(er)), + sys.exc_info()[2]) + + def collect_instances(self): + cached_instances = _collect_and_cache_nodes(self) + for (node, data) in cached_instances[self.identifier]: + created_instance = CreatedInstance.from_existing( + data, self + ) + created_instance.transient_data["node"] = node + self._add_instance_to_context(created_instance) + + def update_instances(self, update_list): + for created_inst, _changes in update_list: + instance_node = created_inst.transient_data["node"] + + # in case node is not existing anymore (user erased it manually) + try: + instance_node.fullName() + except ValueError: + self.remove_instances([created_inst]) + continue + + set_node_data( + instance_node, + INSTANCE_DATA_KNOB, + created_inst.data_to_store() + ) + + def remove_instances(self, instances): + for instance in instances: + remove_instance(instance) + self._remove_instance_from_context(instance) + + def get_pre_create_attr_defs(self): + return [ + BoolDef("use_selection", label="Use selection") + ] + + def get_creator_settings(self, project_settings, settings_key=None): + if not settings_key: + settings_key = 
self.__class__.__name__ + return project_settings["nuke"]["create"][settings_key] + + +class NukeWriteCreator(NukeCreator): + """Add Publishable Write node""" + + identifier = "create_write" + label = "Create Write" + family = "write" + icon = "sign-out" + + def integrate_links(self, node, outputs=True): + # skip if no selection + if not self.selected_node: + return + + # collect dependencies + input_nodes = [self.selected_node] + dependent_nodes = self.selected_node.dependent() if outputs else [] + + # relinking to collected connections + for i, input in enumerate(input_nodes): + node.setInput(i, input) + + # make it nicer in graph + node.autoplace() + + # relink also dependent nodes + for dep_nodes in dependent_nodes: + dep_nodes.setInput(0, node) + + def set_selected_nodes(self, pre_create_data): + if pre_create_data.get("use_selection"): + selected_nodes = nuke.selectedNodes() + if selected_nodes == []: + raise NukeCreatorError("Creator error: No active selection") + elif len(selected_nodes) > 1: + NukeCreatorError("Creator error: Select only one camera node") + self.selected_node = selected_nodes[0] + else: + self.selected_node = None + + def get_pre_create_attr_defs(self): + attr_defs = [ + BoolDef("use_selection", label="Use selection"), + self._get_render_target_enum() + ] + return attr_defs + + def get_instance_attr_defs(self): + attr_defs = [ + self._get_render_target_enum(), + self._get_reviewable_bool() + ] + return attr_defs + + def _get_render_target_enum(self): + rendering_targets = { + "local": "Local machine rendering", + "frames": "Use existing frames" + } + if ("farm_rendering" in self.instance_attributes): + rendering_targets["farm"] = "Farm rendering" + + return EnumDef( + "render_target", + items=rendering_targets, + label="Render target" + ) + + def _get_reviewable_bool(self): + return BoolDef( + "review", + default=("reviewable" in self.instance_attributes), + label="Review" + ) + + def create(self, subset_name, instance_data, pre_create_data): + # make sure selected nodes are added + self.set_selected_nodes(pre_create_data) + + # make sure subset name is unique + if self.check_existing_subset(subset_name, instance_data): + raise NukeCreatorError( + ("subset {} is already published" + "definition.").format(subset_name)) + + instance_node = self.create_instance_node( + subset_name, + instance_data + ) + + try: + instance = CreatedInstance( + self.family, + subset_name, + instance_data, + self + ) + + instance.transient_data["node"] = instance_node + + self._add_instance_to_context(instance) + + set_node_data( + instance_node, INSTANCE_DATA_KNOB, instance.data_to_store()) + + return instance + + except Exception as er: + six.reraise( + NukeCreatorError, + NukeCreatorError("Creator error: {}".format(er)), + sys.exc_info()[2] + ) + + def apply_settings( + self, + project_settings, + system_settings + ): + """Method called on initialization of plugin to apply settings.""" + + # plugin settings + plugin_settings = self.get_creator_settings(project_settings) + + # individual attributes + self.instance_attributes = plugin_settings.get( + "instance_attributes") or self.instance_attributes + self.prenodes = plugin_settings["prenodes"] + self.default_variants = plugin_settings.get( + "default_variants") or self.default_variants + self.temp_rendering_path_template = ( + plugin_settings.get("temp_rendering_path_template") + or self.temp_rendering_path_template + ) class OpenPypeCreator(LegacyCreator): @@ -72,6 +418,41 @@ def process(self): return instance +def 
get_instance_group_node_childs(instance): + """Return list of instance group node children + + Args: + instance (pyblish.Instance): pyblish instance + + Returns: + list: [nuke.Node] + """ + node = instance.data["transientData"]["node"] + + if node.Class() != "Group": + return + + # collect child nodes + child_nodes = [] + # iterate all nodes + for node in nuke.allNodes(group=node): + # add contained nodes to instance's node list + child_nodes.append(node) + + return child_nodes + + +def get_colorspace_from_node(node): + # Add version data to instance + colorspace = node["colorspace"].value() + + # remove default part of the string + if "default (" in colorspace: + colorspace = re.sub(r"default.\(|\)", "", colorspace) + + return colorspace + + def get_review_presets_config(): settings = get_current_project_settings() review_profiles = ( @@ -173,7 +554,6 @@ def __init__(self, def get_file_info(self): if self.collection: - self.log.debug("Collection: `{}`".format(self.collection)) # get path self.fname = os.path.basename(self.collection.format( "{head}{padding}{tail}")) @@ -308,7 +688,6 @@ def generate_lut(self, **kwargs): # connect self._temp_nodes.append(cms_node) self.previous_node = cms_node - self.log.debug("CMSTestPattern... `{}`".format(self._temp_nodes)) if bake_viewer_process: # Node View Process @@ -341,8 +720,6 @@ def generate_lut(self, **kwargs): # connect gen_lut_node.setInput(0, self.previous_node) self._temp_nodes.append(gen_lut_node) - self.log.debug("GenerateLUT... `{}`".format(self._temp_nodes)) - # ---------- end nodes creation # Export lut file @@ -356,8 +733,6 @@ def generate_lut(self, **kwargs): # ---------- generate representation data self.get_representation_data() - self.log.debug("Representation... `{}`".format(self.data)) - # ---------- Clean up self.clean_nodes() @@ -583,6 +958,7 @@ def generate_mov(self, farm=False, **kwargs): return self.data +@deprecated("openpype.hosts.nuke.api.plugin.NukeWriteCreator") class AbstractWriteRender(OpenPypeCreator): """Abstract creator to gather similar implementation for Write creators""" name = "" @@ -609,7 +985,6 @@ def __init__(self, *args, **kwargs): self.data = data self.nodes = nuke.selectedNodes() - self.log.debug("_ self.data: '{}'".format(self.data)) def process(self): @@ -734,3 +1109,149 @@ def _modify_write_node(self, write_node): node (nuke.Node): group node with data as Knobs """ pass + + +def convert_to_valid_instaces(): + """ Check and convert to latest publisher instances + + Also save as new minor version of workfile. 
+ """ + def family_to_identifier(family): + mapping = { + "render": "create_write_render", + "prerender": "create_write_prerender", + "still": "create_write_image", + "model": "create_model", + "camera": "create_camera", + "nukenodes": "create_backdrop", + "gizmo": "create_gizmo", + "source": "create_source" + + } + return mapping[family] + + from openpype.hosts.nuke.api import workio + + task_name = legacy_io.Session["AVALON_TASK"] + + # save into new workfile + current_file = workio.current_file() + + # add file suffex if not + if "_publisherConvert" not in current_file: + new_workfile = ( + current_file[:-3] + + "_publisherConvert" + + current_file[-3:] + ) + else: + new_workfile = current_file + + path = new_workfile.replace("\\", "/") + nuke.scriptSaveAs(new_workfile, overwrite=1) + nuke.Root()["name"].setValue(path) + nuke.Root()["project_directory"].setValue(os.path.dirname(path)) + nuke.Root().setModified(False) + + _remove_old_knobs(nuke.Root()) + + # loop all nodes and convert + for node in nuke.allNodes(recurseGroups=True): + transfer_data = { + "creator_attributes": {} + } + creator_attr = transfer_data["creator_attributes"] + + if node.Class() in ["Viewer", "Dot"]: + continue + + if get_node_data(node, INSTANCE_DATA_KNOB): + continue + + # get data from avalon knob + avalon_knob_data = get_avalon_knob_data( + node, ["avalon:", "ak:"]) + + if not avalon_knob_data: + continue + + if avalon_knob_data["id"] != "pyblish.avalon.instance": + continue + + transfer_data.update({ + k: v for k, v in avalon_knob_data.items() + if k not in ["families", "creator"] + }) + + transfer_data["task"] = task_name + + family = avalon_knob_data["family"] + # establish families + families_ak = avalon_knob_data.get("families", []) + + if "suspend_publish" in node.knobs(): + creator_attr["suspended_publish"] = ( + node["suspend_publish"].value()) + + # get review knob value + if "review" in node.knobs(): + creator_attr["review"] = ( + node["review"].value()) + + if "publish" in node.knobs(): + transfer_data["active"] = ( + node["publish"].value()) + + # add idetifier + transfer_data["creator_identifier"] = family_to_identifier(family) + + # Add all nodes in group instances. 
+ if node.Class() == "Group": + # only alter families for render family + if families_ak and "write" in families_ak.lower(): + target = node["render"].value() + if target == "Use existing frames": + creator_attr["render_target"] = "frames" + elif target == "Local": + # Local rendering + creator_attr["render_target"] = "local" + elif target == "On farm": + # Farm rendering + creator_attr["render_target"] = "farm" + + if "deadlinePriority" in node.knobs(): + transfer_data["farm_priority"] = ( + node["deadlinePriority"].value()) + if "deadlineChunkSize" in node.knobs(): + creator_attr["farm_chunk"] = ( + node["deadlineChunkSize"].value()) + if "deadlineConcurrentTasks" in node.knobs(): + creator_attr["farm_concurency"] = ( + node["deadlineConcurrentTasks"].value()) + + _remove_old_knobs(node) + + # add new instance knob with transfer data + set_node_data( + node, INSTANCE_DATA_KNOB, transfer_data) + + nuke.scriptSave() + + +def _remove_old_knobs(node): + remove_knobs = [ + "review", "publish", "render", "suspend_publish", "warn", "divd", + "OpenpypeDataGroup", "OpenpypeDataGroup_End", "deadlinePriority", + "deadlineChunkSize", "deadlineConcurrentTasks", "Deadline" + ] + print(node.name()) + + # remove all old knobs + for knob in node.allKnobs(): + try: + if knob.name() in remove_knobs: + node.removeKnob(knob) + elif "avalon" in knob.name(): + node.removeKnob(knob) + except ValueError: + pass diff --git a/openpype/hosts/nuke/plugins/create/convert_legacy.py b/openpype/hosts/nuke/plugins/create/convert_legacy.py new file mode 100644 index 00000000000..d7341c625f4 --- /dev/null +++ b/openpype/hosts/nuke/plugins/create/convert_legacy.py @@ -0,0 +1,49 @@ +from openpype.pipeline.create.creator_plugins import SubsetConvertorPlugin +from openpype.hosts.nuke.api.lib import ( + INSTANCE_DATA_KNOB, + get_node_data, + get_avalon_knob_data +) +from openpype.hosts.nuke.api.plugin import convert_to_valid_instaces + +import nuke + + +class LegacyConverted(SubsetConvertorPlugin): + identifier = "legacy.converter" + + def find_instances(self): + + legacy_found = False + # search for first available legacy item + for node in nuke.allNodes(recurseGroups=True): + + if node.Class() in ["Viewer", "Dot"]: + continue + + if get_node_data(node, INSTANCE_DATA_KNOB): + continue + + # get data from avalon knob + avalon_knob_data = get_avalon_knob_data( + node, ["avalon:", "ak:"], create=False) + + if not avalon_knob_data: + continue + + if avalon_knob_data["id"] != "pyblish.avalon.instance": + continue + + # catch and break + legacy_found = True + break + + if legacy_found: + # if not item do not add legacy instance convertor + self.add_convertor_item("Convert legacy instances") + + def convert(self): + # loop all instances and convert them + convert_to_valid_instaces() + # remove legacy item if all is fine + self.remove_convertor_item() diff --git a/openpype/hosts/nuke/plugins/create/create_backdrop.py b/openpype/hosts/nuke/plugins/create/create_backdrop.py index 0c11b3f2740..ebc66e95a77 100644 --- a/openpype/hosts/nuke/plugins/create/create_backdrop.py +++ b/openpype/hosts/nuke/plugins/create/create_backdrop.py @@ -1,56 +1,57 @@ -import nuke -from openpype.hosts.nuke.api import plugin -from openpype.hosts.nuke.api.lib import ( - select_nodes, - set_avalon_knob_data +from nukescripts import autoBackdrop + +from openpype.hosts.nuke.api import ( + NukeCreator, + NukeCreatorError, + maintained_selection, + select_nodes + ) -class CreateBackdrop(plugin.OpenPypeCreator): +class CreateBackdrop(NukeCreator): """Add Publishable 
Backdrop""" - name = "nukenodes" - label = "Create Backdrop" + identifier = "create_backdrop" + label = "Nukenodes (backdrop)" family = "nukenodes" icon = "file-archive-o" - defaults = ["Main"] - - def __init__(self, *args, **kwargs): - super(CreateBackdrop, self).__init__(*args, **kwargs) - self.nodes = nuke.selectedNodes() - self.node_color = "0xdfea5dff" - return - - def process(self): - from nukescripts import autoBackdrop - nodes = list() - if (self.options or {}).get("useSelection"): - nodes = self.nodes - - if len(nodes) >= 1: - select_nodes(nodes) - bckd_node = autoBackdrop() - bckd_node["name"].setValue("{}_BDN".format(self.name)) - bckd_node["tile_color"].setValue(int(self.node_color, 16)) - bckd_node["note_font_size"].setValue(24) - bckd_node["label"].setValue("[{}]".format(self.name)) - # add avalon knobs - instance = set_avalon_knob_data(bckd_node, self.data) - - return instance - else: - msg = str("Please select nodes you " - "wish to add to a container") - self.log.error(msg) - nuke.message(msg) - return - else: - bckd_node = autoBackdrop() - bckd_node["name"].setValue("{}_BDN".format(self.name)) - bckd_node["tile_color"].setValue(int(self.node_color, 16)) - bckd_node["note_font_size"].setValue(24) - bckd_node["label"].setValue("[{}]".format(self.name)) - # add avalon knobs - instance = set_avalon_knob_data(bckd_node, self.data) - - return instance + maintain_selection = True + + # plugin attributes + node_color = "0xdfea5dff" + + def create_instance_node( + self, + node_name, + knobs=None, + parent=None, + node_type=None + ): + with maintained_selection(): + if len(self.selected_nodes) >= 1: + select_nodes(self.selected_nodes) + + created_node = autoBackdrop() + created_node["name"].setValue(node_name) + created_node["tile_color"].setValue(int(self.node_color, 16)) + created_node["note_font_size"].setValue(24) + created_node["label"].setValue("[{}]".format(node_name)) + + self.add_info_knob(created_node) + + return created_node + + def create(self, subset_name, instance_data, pre_create_data): + if self.check_existing_subset(subset_name, instance_data): + raise NukeCreatorError( + ("Subset name '{}' is already used. 
" + "Please specify different Variant.").format(subset_name)) + + instance = super(CreateBackdrop, self).create( + subset_name, + instance_data, + pre_create_data + ) + + return instance diff --git a/openpype/hosts/nuke/plugins/create/create_camera.py b/openpype/hosts/nuke/plugins/create/create_camera.py index 3b13c80dc4c..dc4a30f513d 100644 --- a/openpype/hosts/nuke/plugins/create/create_camera.py +++ b/openpype/hosts/nuke/plugins/create/create_camera.py @@ -1,55 +1,70 @@ import nuke -from openpype.hosts.nuke.api import plugin -from openpype.hosts.nuke.api.lib import ( - set_avalon_knob_data +from openpype.hosts.nuke.api import ( + NukeCreator, + NukeCreatorError, + maintained_selection ) -class CreateCamera(plugin.OpenPypeCreator): - """Add Publishable Backdrop""" +class CreateCamera(NukeCreator): + """Add Publishable Camera""" - name = "camera" - label = "Create 3d Camera" + identifier = "create_camera" + label = "Camera (3d)" family = "camera" icon = "camera" - defaults = ["Main"] - - def __init__(self, *args, **kwargs): - super(CreateCamera, self).__init__(*args, **kwargs) - self.nodes = nuke.selectedNodes() - self.node_color = "0xff9100ff" - return - - def process(self): - nodes = list() - if (self.options or {}).get("useSelection"): - nodes = self.nodes - - if len(nodes) >= 1: - # loop selected nodes - for n in nodes: - data = self.data.copy() - if len(nodes) > 1: - # rename subset name only if more - # then one node are selected - subset = self.family + n["name"].value().capitalize() - data["subset"] = subset - - # change node color - n["tile_color"].setValue(int(self.node_color, 16)) - # add avalon knobs - set_avalon_knob_data(n, data) - return True + + # plugin attributes + node_color = "0xff9100ff" + + def create_instance_node( + self, + node_name, + knobs=None, + parent=None, + node_type=None + ): + with maintained_selection(): + if self.selected_nodes: + node = self.selected_nodes[0] + if node.Class() != "Camera3": + raise NukeCreatorError( + "Creator error: Select only camera node type") + created_node = self.selected_nodes[0] else: - msg = str("Please select nodes you " - "wish to add to a container") - self.log.error(msg) - nuke.message(msg) - return + created_node = nuke.createNode("Camera2") + + created_node["tile_color"].setValue( + int(self.node_color, 16)) + + created_node["name"].setValue(node_name) + + self.add_info_knob(created_node) + + return created_node + + def create(self, subset_name, instance_data, pre_create_data): + if self.check_existing_subset(subset_name, instance_data): + raise NukeCreatorError( + ("Subset name '{}' is already used. 
" + "Please specify different Variant.").format(subset_name)) + + instance = super(CreateCamera, self).create( + subset_name, + instance_data, + pre_create_data + ) + + return instance + + def set_selected_nodes(self, pre_create_data): + if pre_create_data.get("use_selection"): + self.selected_nodes = nuke.selectedNodes() + if self.selected_nodes == []: + raise NukeCreatorError( + "Creator error: No active selection") + elif len(self.selected_nodes) > 1: + raise NukeCreatorError( + "Creator error: Select only one camera node") else: - # if selected is off then create one node - camera_node = nuke.createNode("Camera2") - camera_node["tile_color"].setValue(int(self.node_color, 16)) - # add avalon knobs - instance = set_avalon_knob_data(camera_node, self.data) - return instance + self.selected_nodes = [] diff --git a/openpype/hosts/nuke/plugins/create/create_gizmo.py b/openpype/hosts/nuke/plugins/create/create_gizmo.py index d616f6f7ad7..1869874e226 100644 --- a/openpype/hosts/nuke/plugins/create/create_gizmo.py +++ b/openpype/hosts/nuke/plugins/create/create_gizmo.py @@ -1,87 +1,69 @@ import nuke - -from openpype.hosts.nuke.api import plugin -from openpype.hosts.nuke.api.lib import ( - maintained_selection, - select_nodes, - set_avalon_knob_data +from openpype.hosts.nuke.api import ( + NukeCreator, + NukeCreatorError, + maintained_selection ) -class CreateGizmo(plugin.OpenPypeCreator): - """Add Publishable "gizmo" group - - The name is symbolically gizmo as presumably - it is something familiar to nuke users as group of nodes - distributed downstream in workflow - """ +class CreateGizmo(NukeCreator): + """Add Publishable Group as gizmo""" - name = "gizmo" - label = "Gizmo" + identifier = "create_gizmo" + label = "Gizmo (group)" family = "gizmo" icon = "file-archive-o" - defaults = ["ViewerInput", "Lut", "Effect"] + default_variants = ["ViewerInput", "Lut", "Effect"] - def __init__(self, *args, **kwargs): - super(CreateGizmo, self).__init__(*args, **kwargs) - self.nodes = nuke.selectedNodes() - self.node_color = "0x7533c1ff" - return + # plugin attributes + node_color = "0x7533c1ff" - def process(self): - if (self.options or {}).get("useSelection"): - nodes = self.nodes - self.log.info(len(nodes)) - if len(nodes) == 1: - select_nodes(nodes) - node = nodes[-1] - # check if Group node - if node.Class() in "Group": - node["name"].setValue("{}_GZM".format(self.name)) - node["tile_color"].setValue(int(self.node_color, 16)) - return set_avalon_knob_data(node, self.data) - else: - msg = ("Please select a group node " - "you wish to publish as the gizmo") - self.log.error(msg) - nuke.message(msg) + def create_instance_node( + self, + node_name, + knobs=None, + parent=None, + node_type=None + ): + with maintained_selection(): + if self.selected_nodes: + node = self.selected_nodes[0] + if node.Class() != "Group": + raise NukeCreatorError( + "Creator error: Select only 'Group' node type") + created_node = node + else: + created_node = nuke.collapseToGroup() - if len(nodes) >= 2: - select_nodes(nodes) - nuke.makeGroup() - gizmo_node = nuke.selectedNode() - gizmo_node["name"].setValue("{}_GZM".format(self.name)) - gizmo_node["tile_color"].setValue(int(self.node_color, 16)) + created_node["tile_color"].setValue( + int(self.node_color, 16)) - # add sticky node with guide - with gizmo_node: - sticky = nuke.createNode("StickyNote") - sticky["label"].setValue( - "Add following:\n- set Input" - " nodes\n- set one Output1\n" - "- create User knobs on the group") + created_node["name"].setValue(node_name) - # add 
avalon knobs - return set_avalon_knob_data(gizmo_node, self.data) + self.add_info_knob(created_node) - else: - msg = "Please select nodes you wish to add to the gizmo" - self.log.error(msg) - nuke.message(msg) - return - else: - with maintained_selection(): - gizmo_node = nuke.createNode("Group") - gizmo_node["name"].setValue("{}_GZM".format(self.name)) - gizmo_node["tile_color"].setValue(int(self.node_color, 16)) + return created_node + + def create(self, subset_name, instance_data, pre_create_data): + if self.check_existing_subset(subset_name, instance_data): + raise NukeCreatorError( + ("Subset name '{}' is already used. " + "Please specify different Variant.").format(subset_name)) - # add sticky node with guide - with gizmo_node: - sticky = nuke.createNode("StickyNote") - sticky["label"].setValue( - "Add following:\n- add Input" - " nodes\n- add one Output1\n" - "- create User knobs on the group") + instance = super(CreateGizmo, self).create( + subset_name, + instance_data, + pre_create_data + ) - # add avalon knobs - return set_avalon_knob_data(gizmo_node, self.data) + return instance + + def set_selected_nodes(self, pre_create_data): + if pre_create_data.get("use_selection"): + self.selected_nodes = nuke.selectedNodes() + if self.selected_nodes == []: + raise NukeCreatorError("Creator error: No active selection") + elif len(self.selected_nodes) > 1: + NukeCreatorError("Creator error: Select only one 'Group' node") + else: + self.selected_nodes = [] diff --git a/openpype/hosts/nuke/plugins/create/create_model.py b/openpype/hosts/nuke/plugins/create/create_model.py index 15a4e3ab8a0..53b3a582880 100644 --- a/openpype/hosts/nuke/plugins/create/create_model.py +++ b/openpype/hosts/nuke/plugins/create/create_model.py @@ -1,87 +1,69 @@ import nuke -from openpype.hosts.nuke.api import plugin -from openpype.hosts.nuke.api.lib import ( - set_avalon_knob_data +from openpype.hosts.nuke.api import ( + NukeCreator, + NukeCreatorError, + maintained_selection ) -class CreateModel(plugin.OpenPypeCreator): - """Add Publishable Model Geometry""" +class CreateModel(NukeCreator): + """Add Publishable Camera""" - name = "model" - label = "Create 3d Model" + identifier = "create_model" + label = "Model (3d)" family = "model" icon = "cube" - defaults = ["Main"] + default_variants = ["Main"] - def __init__(self, *args, **kwargs): - super(CreateModel, self).__init__(*args, **kwargs) - self.nodes = nuke.selectedNodes() - self.node_color = "0xff3200ff" - return + # plugin attributes + node_color = "0xff3200ff" - def process(self): - nodes = list() - if (self.options or {}).get("useSelection"): - nodes = self.nodes - for n in nodes: - n['selected'].setValue(0) - end_nodes = list() + def create_instance_node( + self, + node_name, + knobs=None, + parent=None, + node_type=None + ): + with maintained_selection(): + if self.selected_nodes: + node = self.selected_nodes[0] + if node.Class() != "Scene": + raise NukeCreatorError( + "Creator error: Select only 'Scene' node type") + created_node = node + else: + created_node = nuke.createNode("Scene") - # get the latest nodes in tree for selecion - for n in nodes: - x = n - end = 0 - while end == 0: - try: - x = x.dependent()[0] - except: - end_node = x - end = 1 - end_nodes.append(end_node) + created_node["tile_color"].setValue( + int(self.node_color, 16)) - # set end_nodes - end_nodes = list(set(end_nodes)) + created_node["name"].setValue(node_name) - # check if nodes is 3d nodes - for n in end_nodes: - n['selected'].setValue(1) - sn = nuke.createNode("Scene") - if not 
diff --git a/openpype/hosts/nuke/plugins/create/create_model.py b/openpype/hosts/nuke/plugins/create/create_model.py
index 15a4e3ab8a0..53b3a582880 100644
--- a/openpype/hosts/nuke/plugins/create/create_model.py
+++ b/openpype/hosts/nuke/plugins/create/create_model.py
@@ -1,87 +1,69 @@
 import nuke
-from openpype.hosts.nuke.api import plugin
-from openpype.hosts.nuke.api.lib import (
-    set_avalon_knob_data
+from openpype.hosts.nuke.api import (
+    NukeCreator,
+    NukeCreatorError,
+    maintained_selection
 )


-class CreateModel(plugin.OpenPypeCreator):
-    """Add Publishable Model Geometry"""
+class CreateModel(NukeCreator):
+    """Add Publishable Model"""

-    name = "model"
-    label = "Create 3d Model"
+    identifier = "create_model"
+    label = "Model (3d)"
     family = "model"
     icon = "cube"
-    defaults = ["Main"]
+    default_variants = ["Main"]

-    def __init__(self, *args, **kwargs):
-        super(CreateModel, self).__init__(*args, **kwargs)
-        self.nodes = nuke.selectedNodes()
-        self.node_color = "0xff3200ff"
-        return
+    # plugin attributes
+    node_color = "0xff3200ff"

-    def process(self):
-        nodes = list()
-        if (self.options or {}).get("useSelection"):
-            nodes = self.nodes
-            for n in nodes:
-                n['selected'].setValue(0)
-            end_nodes = list()
+    def create_instance_node(
+        self,
+        node_name,
+        knobs=None,
+        parent=None,
+        node_type=None
+    ):
+        with maintained_selection():
+            if self.selected_nodes:
+                node = self.selected_nodes[0]
+                if node.Class() != "Scene":
+                    raise NukeCreatorError(
+                        "Creator error: Select only 'Scene' node type")
+                created_node = node
+            else:
+                created_node = nuke.createNode("Scene")

-            # get the latest nodes in tree for selecion
-            for n in nodes:
-                x = n
-                end = 0
-                while end == 0:
-                    try:
-                        x = x.dependent()[0]
-                    except:
-                        end_node = x
-                        end = 1
-                end_nodes.append(end_node)
+            created_node["tile_color"].setValue(
+                int(self.node_color, 16))

-            # set end_nodes
-            end_nodes = list(set(end_nodes))
+            created_node["name"].setValue(node_name)

-            # check if nodes is 3d nodes
-            for n in end_nodes:
-                n['selected'].setValue(1)
-                sn = nuke.createNode("Scene")
-                if not sn.input(0):
-                    end_nodes.remove(n)
-                nuke.delete(sn)
+            self.add_info_knob(created_node)

-            # loop over end nodes
-            for n in end_nodes:
-                n['selected'].setValue(1)
+            return created_node

-            self.nodes = nuke.selectedNodes()
-            nodes = self.nodes
-            if len(nodes) >= 1:
-                # loop selected nodes
-                for n in nodes:
-                    data = self.data.copy()
-                    if len(nodes) > 1:
-                        # rename subset name only if more
-                        # then one node are selected
-                        subset = self.family + n["name"].value().capitalize()
-                        data["subset"] = subset
+    def create(self, subset_name, instance_data, pre_create_data):
+        if self.check_existing_subset(subset_name, instance_data):
+            raise NukeCreatorError(
+                ("Subset name '{}' is already used. "
+                 "Please specify different Variant.").format(subset_name))

-                    # change node color
-                    n["tile_color"].setValue(int(self.node_color, 16))
-                    # add avalon knobs
-                    set_avalon_knob_data(n, data)
-                return True
-            else:
-                msg = str("Please select nodes you "
-                          "wish to add to a container")
-                self.log.error(msg)
-                nuke.message(msg)
-                return
+        instance = super(CreateModel, self).create(
+            subset_name,
+            instance_data,
+            pre_create_data
+        )
+
+        return instance
+
+    def set_selected_nodes(self, pre_create_data):
+        if pre_create_data.get("use_selection"):
+            self.selected_nodes = nuke.selectedNodes()
+            if self.selected_nodes == []:
+                raise NukeCreatorError("Creator error: No active selection")
+            elif len(self.selected_nodes) > 1:
+                raise NukeCreatorError("Creator error: Select only one 'Scene' node")
         else:
-            # if selected is off then create one node
-            model_node = nuke.createNode("WriteGeo")
-            model_node["tile_color"].setValue(int(self.node_color, 16))
-            # add avalon knobs
-            instance = set_avalon_knob_data(model_node, self.data)
-            return instance
+            self.selected_nodes = []
diff --git a/openpype/hosts/nuke/plugins/create/create_read.py b/openpype/hosts/nuke/plugins/create/create_read.py
deleted file mode 100644
index 87a9dff0f86..00000000000
--- a/openpype/hosts/nuke/plugins/create/create_read.py
+++ /dev/null
@@ -1,57 +0,0 @@
-from collections import OrderedDict
-
-import nuke
-
-from openpype.hosts.nuke.api import plugin
-from openpype.hosts.nuke.api.lib import (
-    set_avalon_knob_data
-)
-
-
-class CrateRead(plugin.OpenPypeCreator):
-    # change this to template preset
-    name = "ReadCopy"
-    label = "Create Read Copy"
-    hosts = ["nuke"]
-    family = "source"
-    families = family
-    icon = "film"
-    defaults = ["Effect", "Backplate", "Fire", "Smoke"]
-
-    def __init__(self, *args, **kwargs):
-        super(CrateRead, self).__init__(*args, **kwargs)
-        self.nodes = nuke.selectedNodes()
-        data = OrderedDict()
-        data['family'] = self.family
-        data['families'] = self.families
-
-        for k, v in self.data.items():
-            if k not in data.keys():
-                data.update({k: v})
-
-        self.data = data
-
-    def process(self):
-        self.name = self.data["subset"]
-        nodes = self.nodes
-
-        if not nodes or len(nodes) == 0:
-            msg = "Please select Read node"
-            self.log.error(msg)
-            nuke.message(msg)
-        else:
-            count_reads = 0
-            for node in nodes:
-                if node.Class() != 'Read':
-                    continue
-                avalon_data = self.data
-                avalon_data['subset'] = "{}".format(self.name)
-                set_avalon_knob_data(node, avalon_data)
-                node['tile_color'].setValue(16744935)
-                count_reads += 1
-
-            if count_reads < 1:
-                msg = "Please select Read node"
-                self.log.error(msg)
-                nuke.message(msg)
-        return
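CreateSource below replaces the deleted CrateRead plugin: instead of stamping avalon knobs on each Read node, it stores the publish payload in the hidden JSON knob. A tiny round-trip sketch of that storage, assuming a running Nuke session; the payload shown is illustrative only, the real one comes from CreatedInstance.data_to_store():

import nuke
from openpype.hosts.nuke.api import (
    INSTANCE_DATA_KNOB,
    get_node_data,
    set_node_data,
)

read_node = nuke.createNode("Read")

# write an illustrative payload into the invisible knob
set_node_data(read_node, INSTANCE_DATA_KNOB, {
    "creator_identifier": "create_source",
    "subset": "sourceMain",
})

# collectors and the creators' collect_instances() read the same knob back
payload = get_node_data(read_node, INSTANCE_DATA_KNOB)
assert payload["subset"] == "sourceMain"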
diff --git a/openpype/hosts/nuke/plugins/create/create_source.py b/openpype/hosts/nuke/plugins/create/create_source.py
new file mode 100644
index 00000000000..35fe42c16b9
--- /dev/null
+++ b/openpype/hosts/nuke/plugins/create/create_source.py
@@ -0,0 +1,91 @@
+import nuke
+import six
+import sys
+from openpype.hosts.nuke.api import (
+    INSTANCE_DATA_KNOB,
+    NukeCreator,
+    NukeCreatorError,
+    set_node_data
+)
+from openpype.pipeline import (
+    CreatedInstance
+)
+
+
+class CreateSource(NukeCreator):
+    """Add Publishable Read with source"""
+
+    identifier = "create_source"
+    label = "Source (read)"
+    family = "source"
+    icon = "film"
+    default_variants = ["Effect", "Backplate", "Fire", "Smoke"]
+
+    # plugin attributes
+    node_color = "0xff9100ff"
+
+    def create_instance_node(
+        self,
+        node_name,
+        read_node
+    ):
+        read_node["tile_color"].setValue(
+            int(self.node_color, 16))
+        read_node["name"].setValue(node_name)
+        self.add_info_knob(read_node)
+        return read_node
+
+    def create(self, subset_name, instance_data, pre_create_data):
+
+        # make sure selected nodes are added
+        self.set_selected_nodes(pre_create_data)
+
+        try:
+            for read_node in self.selected_nodes:
+                if read_node.Class() != 'Read':
+                    continue
+
+                node_name = read_node.name()
+                _subset_name = subset_name + node_name
+
+                # make sure subset name is unique
+                if self.check_existing_subset(_subset_name, instance_data):
+                    raise NukeCreatorError(
+                        ("Subset '{}' is already published. "
+                         "Please use a different variant.").format(_subset_name))
+
+                instance_node = self.create_instance_node(
+                    _subset_name,
+                    read_node
+                )
+                instance = CreatedInstance(
+                    self.family,
+                    _subset_name,
+                    instance_data,
+                    self
+                )
+
+                instance.transient_data["node"] = instance_node
+
+                self._add_instance_to_context(instance)
+
+                set_node_data(
+                    instance_node,
+                    INSTANCE_DATA_KNOB,
+                    instance.data_to_store()
+                )
+
+        except Exception as er:
+            six.reraise(
+                NukeCreatorError,
+                NukeCreatorError("Creator error: {}".format(er)),
+                sys.exc_info()[2])
+
+    def set_selected_nodes(self, pre_create_data):
+        if pre_create_data.get("use_selection"):
+            self.selected_nodes = nuke.selectedNodes()
+            if self.selected_nodes == []:
+                raise NukeCreatorError("Creator error: No active selection")
+        else:
+            raise NukeCreatorError(
+                "Creator error: only supported with active selection")
diff --git a/openpype/hosts/nuke/plugins/create/create_write_image.py b/openpype/hosts/nuke/plugins/create/create_write_image.py
new file mode 100644
index 00000000000..cf70063abc5
--- /dev/null
+++ b/openpype/hosts/nuke/plugins/create/create_write_image.py
@@ -0,0 +1,176 @@
+import nuke
+import sys
+import six
+
+from openpype.pipeline import (
+    CreatedInstance
+)
+from openpype.lib import (
+    BoolDef,
+    NumberDef,
+    UISeparatorDef,
+    EnumDef
+)
+from openpype.hosts.nuke import api as napi
+
+
+class CreateWriteImage(napi.NukeWriteCreator):
+    identifier = "create_write_image"
+    label = "Image (write)"
+    family = "image"
+    icon = "sign-out"
+
+    instance_attributes = [
+        "use_range_limit"
+    ]
+    default_variants = [
+        "StillFrame",
+        "MPFrame",
+        "LayoutFrame"
+    ]
+    temp_rendering_path_template = (
+        "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}")
+
+    def get_pre_create_attr_defs(self):
+        attr_defs = [
+            BoolDef(
+                "use_selection",
+                default=True,
+                label="Use selection"
+            ),
+            self._get_render_target_enum(),
+            UISeparatorDef(),
+            self._get_frame_source_number()
+        ]
+        return attr_defs
+
+    def _get_render_target_enum(self):
+        rendering_targets = {
+            "local": "Local machine rendering",
+            "frames": "Use existing frames"
+        }
+
+        return EnumDef(
+            "render_target",
+            items=rendering_targets,
+            label="Render target"
+        )
+
+    def _get_frame_source_number(self):
+        return NumberDef(
+            "active_frame",
+            label="Active
frame", + default=nuke.frame() + ) + + def get_instance_attr_defs(self): + attr_defs = [ + self._get_render_target_enum(), + self._get_reviewable_bool() + ] + return attr_defs + + def create_instance_node(self, subset_name, instance_data): + linked_knobs_ = [] + if "use_range_limit" in self.instance_attributes: + linked_knobs_ = ["channels", "___", "first", "last", "use_limit"] + + # add fpath_template + write_data = { + "creator": self.__class__.__name__, + "subset": subset_name, + "fpath_template": self.temp_rendering_path_template + } + write_data.update(instance_data) + + created_node = napi.create_write_node( + subset_name, + write_data, + input=self.selected_node, + prenodes=self.prenodes, + linked_knobs=linked_knobs_, + **{ + "frame": nuke.frame() + } + ) + self.add_info_knob(created_node) + + self._add_frame_range_limit(created_node, instance_data) + + self.integrate_links(created_node, outputs=True) + + return created_node + + def create(self, subset_name, instance_data, pre_create_data): + subset_name = subset_name.format(**pre_create_data) + + # pass values from precreate to instance + self.pass_pre_attributes_to_instance( + instance_data, + pre_create_data, + [ + "active_frame", + "render_target" + ] + ) + + # make sure selected nodes are added + self.set_selected_nodes(pre_create_data) + + # make sure subset name is unique + if self.check_existing_subset(subset_name, instance_data): + raise napi.NukeCreatorError( + ("subset {} is already published" + "definition.").format(subset_name)) + + instance_node = self.create_instance_node( + subset_name, + instance_data, + ) + + try: + instance = CreatedInstance( + self.family, + subset_name, + instance_data, + self + ) + + instance.transient_data["node"] = instance_node + + self._add_instance_to_context(instance) + + napi.set_node_data( + instance_node, + napi.INSTANCE_DATA_KNOB, + instance.data_to_store() + ) + + return instance + + except Exception as er: + six.reraise( + napi.NukeCreatorError, + napi.NukeCreatorError("Creator error: {}".format(er)), + sys.exc_info()[2] + ) + + def _add_frame_range_limit(self, write_node, instance_data): + if "use_range_limit" not in self.instance_attributes: + return + + active_frame = ( + instance_data["creator_attributes"].get("active_frame")) + + write_node.begin() + for n in nuke.allNodes(): + # get write node + if n.Class() in "Write": + w_node = n + write_node.end() + + w_node["use_limit"].setValue(True) + w_node["first"].setValue(active_frame or nuke.frame()) + w_node["last"].setExpression("first") + + return write_node diff --git a/openpype/hosts/nuke/plugins/create/create_write_prerender.py b/openpype/hosts/nuke/plugins/create/create_write_prerender.py index fec97167fb2..daa2319c4b1 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_prerender.py +++ b/openpype/hosts/nuke/plugins/create/create_write_prerender.py @@ -1,56 +1,179 @@ import nuke +import sys +import six -from openpype.hosts.nuke.api import plugin -from openpype.hosts.nuke.api.lib import ( - create_write_node, create_write_node_legacy) +from openpype.pipeline import ( + CreatedInstance +) +from openpype.lib import ( + BoolDef, + NumberDef, + UISeparatorDef, + UILabelDef +) +from openpype.hosts.nuke import api as napi -class CreateWritePrerender(plugin.AbstractWriteRender): - # change this to template preset - name = "WritePrerender" - label = "Create Write Prerender" - hosts = ["nuke"] - n_class = "Write" +class CreateWritePrerender(napi.NukeWriteCreator): + identifier = "create_write_prerender" + label = "Prerender 
(write)" family = "prerender" icon = "sign-out" - # settings - fpath_template = "{work}/render/nuke/{subset}/{subset}.{frame}.{ext}" - defaults = ["Key01", "Bg01", "Fg01", "Branch01", "Part01"] - reviewable = False - use_range_limit = True + instance_attributes = [ + "use_range_limit" + ] + default_variants = [ + "Key01", + "Bg01", + "Fg01", + "Branch01", + "Part01" + ] + temp_rendering_path_template = ( + "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}") - def __init__(self, *args, **kwargs): - super(CreateWritePrerender, self).__init__(*args, **kwargs) + def get_pre_create_attr_defs(self): + attr_defs = [ + BoolDef( + "use_selection", + default=True, + label="Use selection" + ), + self._get_render_target_enum() + ] + return attr_defs + + def get_instance_attr_defs(self): + attr_defs = [ + self._get_render_target_enum(), + self._get_reviewable_bool() + ] + if "farm_rendering" in self.instance_attributes: + attr_defs.extend([ + UISeparatorDef(), + UILabelDef("Farm rendering attributes"), + BoolDef("suspended_publish", label="Suspended publishing"), + NumberDef( + "farm_priority", + label="Priority", + minimum=1, + maximum=99, + default=50 + ), + NumberDef( + "farm_chunk", + label="Chunk size", + minimum=1, + maximum=99, + default=10 + ), + NumberDef( + "farm_concurency", + label="Concurent tasks", + minimum=1, + maximum=10, + default=1 + ) + ]) + return attr_defs + + def create_instance_node(self, subset_name, instance_data): + linked_knobs_ = [] + if "use_range_limit" in self.instance_attributes: + linked_knobs_ = ["channels", "___", "first", "last", "use_limit"] - def _create_write_node(self, selected_node, inputs, outputs, write_data): # add fpath_template - write_data["fpath_template"] = self.fpath_template - write_data["use_range_limit"] = self.use_range_limit - write_data["frame_range"] = ( - nuke.root()["first_frame"].value(), - nuke.root()["last_frame"].value() + write_data = { + "creator": self.__class__.__name__, + "subset": subset_name, + "fpath_template": self.temp_rendering_path_template + } + + write_data.update(instance_data) + + # get width and height + if self.selected_node: + width, height = ( + self.selected_node.width(), self.selected_node.height()) + else: + actual_format = nuke.root().knob('format').value() + width, height = (actual_format.width(), actual_format.height()) + + created_node = napi.create_write_node( + subset_name, + write_data, + input=self.selected_node, + prenodes=self.prenodes, + linked_knobs=linked_knobs_, + **{ + "width": width, + "height": height + } ) + self.add_info_knob(created_node) + + self._add_frame_range_limit(created_node) + + self.integrate_links(created_node, outputs=True) + + return created_node + + def create(self, subset_name, instance_data, pre_create_data): + # pass values from precreate to instance + self.pass_pre_attributes_to_instance( + instance_data, + pre_create_data, + [ + "render_target" + ] + ) + + # make sure selected nodes are added + self.set_selected_nodes(pre_create_data) - if not self.is_legacy(): - return create_write_node( - self.data["subset"], - write_data, - input=selected_node, - review=self.reviewable, - linked_knobs=["channels", "___", "first", "last", "use_limit"] + # make sure subset name is unique + if self.check_existing_subset(subset_name, instance_data): + raise napi.NukeCreatorError( + ("subset {} is already published" + "definition.").format(subset_name)) + + instance_node = self.create_instance_node( + subset_name, + instance_data + ) + + try: + instance = CreatedInstance( + self.family, + 
subset_name, + instance_data, + self ) - else: - return create_write_node_legacy( - self.data["subset"], - write_data, - input=selected_node, - review=self.reviewable, - linked_knobs=["channels", "___", "first", "last", "use_limit"] + + instance.transient_data["node"] = instance_node + + self._add_instance_to_context(instance) + + napi.set_node_data( + instance_node, + napi.INSTANCE_DATA_KNOB, + instance.data_to_store() ) - def _modify_write_node(self, write_node): - # open group node + return instance + + except Exception as er: + six.reraise( + napi.NukeCreatorError, + napi.NukeCreatorError("Creator error: {}".format(er)), + sys.exc_info()[2] + ) + + def _add_frame_range_limit(self, write_node): + if "use_range_limit" not in self.instance_attributes: + return + write_node.begin() for n in nuke.allNodes(): # get write node @@ -58,9 +181,8 @@ def _modify_write_node(self, write_node): w_node = n write_node.end() - if self.use_range_limit: - w_node["use_limit"].setValue(True) - w_node["first"].setValue(nuke.root()["first_frame"].value()) - w_node["last"].setValue(nuke.root()["last_frame"].value()) + w_node["use_limit"].setValue(True) + w_node["first"].setValue(nuke.root()["first_frame"].value()) + w_node["last"].setValue(nuke.root()["last_frame"].value()) return write_node diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 23846c03325..85133458d10 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -1,86 +1,160 @@ import nuke +import sys +import six -from openpype.hosts.nuke.api import plugin -from openpype.hosts.nuke.api.lib import ( - create_write_node, create_write_node_legacy) +from openpype.pipeline import ( + CreatedInstance +) +from openpype.lib import ( + BoolDef, + NumberDef, + UISeparatorDef, + UILabelDef +) +from openpype.hosts.nuke import api as napi -class CreateWriteRender(plugin.AbstractWriteRender): - # change this to template preset - name = "WriteRender" - label = "Create Write Render" - hosts = ["nuke"] - n_class = "Write" +class CreateWriteRender(napi.NukeWriteCreator): + identifier = "create_write_render" + label = "Render (write)" family = "render" icon = "sign-out" - # settings - fpath_template = "{work}/render/nuke/{subset}/{subset}.{frame}.{ext}" - defaults = ["Main", "Mask"] - prenodes = { - "Reformat01": { - "nodeclass": "Reformat", - "dependent": None, - "knobs": [ - { - "type": "text", - "name": "resize", - "value": "none" - }, - { - "type": "bool", - "name": "black_outside", - "value": True - } - ] - } - } + instance_attributes = [ + "reviewable" + ] + default_variants = [ + "Main", + "Mask" + ] + temp_rendering_path_template = ( + "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}") + + def get_pre_create_attr_defs(self): + attr_defs = [ + BoolDef( + "use_selection", + default=True, + label="Use selection" + ), + self._get_render_target_enum() + ] + return attr_defs - def __init__(self, *args, **kwargs): - super(CreateWriteRender, self).__init__(*args, **kwargs) + def get_instance_attr_defs(self): + attr_defs = [ + self._get_render_target_enum(), + self._get_reviewable_bool() + ] + if "farm_rendering" in self.instance_attributes: + attr_defs.extend([ + UISeparatorDef(), + UILabelDef("Farm rendering attributes"), + BoolDef("suspended_publish", label="Suspended publishing"), + NumberDef( + "farm_priority", + label="Priority", + minimum=1, + maximum=99, + default=50 + ), + NumberDef( + 
"farm_chunk", + label="Chunk size", + minimum=1, + maximum=99, + default=10 + ), + NumberDef( + "farm_concurency", + label="Concurent tasks", + minimum=1, + maximum=10, + default=1 + ) + ]) + return attr_defs - def _create_write_node(self, selected_node, inputs, outputs, write_data): + def create_instance_node(self, subset_name, instance_data): # add fpath_template - write_data["fpath_template"] = self.fpath_template + write_data = { + "creator": self.__class__.__name__, + "subset": subset_name, + "fpath_template": self.temp_rendering_path_template + } + + write_data.update(instance_data) - # add reformat node to cut off all outside of format bounding box # get width and height - try: - width, height = (selected_node.width(), selected_node.height()) - except AttributeError: + if self.selected_node: + width, height = ( + self.selected_node.width(), self.selected_node.height()) + else: actual_format = nuke.root().knob('format').value() width, height = (actual_format.width(), actual_format.height()) - if not self.is_legacy(): - return create_write_node( - self.data["subset"], - write_data, - input=selected_node, - prenodes=self.prenodes, - **{ - "width": width, - "height": height - } - ) - else: - _prenodes = [ - { - "name": "Reformat01", - "class": "Reformat", - "knobs": [ - ("resize", 0), - ("black_outside", 1), - ], - "dependent": None - } + created_node = napi.create_write_node( + subset_name, + write_data, + input=self.selected_node, + prenodes=self.prenodes, + **{ + "width": width, + "height": height + } + ) + self.add_info_knob(created_node) + + self.integrate_links(created_node, outputs=False) + + return created_node + + def create(self, subset_name, instance_data, pre_create_data): + # pass values from precreate to instance + self.pass_pre_attributes_to_instance( + instance_data, + pre_create_data, + [ + "render_target" ] + ) + # make sure selected nodes are added + self.set_selected_nodes(pre_create_data) + + # make sure subset name is unique + if self.check_existing_subset(subset_name, instance_data): + raise napi.NukeCreatorError( + ("subset {} is already published" + "definition.").format(subset_name)) + + instance_node = self.create_instance_node( + subset_name, + instance_data + ) + + try: + instance = CreatedInstance( + self.family, + subset_name, + instance_data, + self + ) + + instance.transient_data["node"] = instance_node + + self._add_instance_to_context(instance) - return create_write_node_legacy( - self.data["subset"], - write_data, - input=selected_node, - prenodes=_prenodes + napi.set_node_data( + instance_node, + napi.INSTANCE_DATA_KNOB, + instance.data_to_store() ) - def _modify_write_node(self, write_node): - return write_node + return instance + + except Exception as er: + six.reraise( + napi.NukeCreatorError, + napi.NukeCreatorError("Creator error: {}".format(er)), + sys.exc_info()[2] + ) diff --git a/openpype/hosts/nuke/plugins/create/create_write_still.py b/openpype/hosts/nuke/plugins/create/create_write_still.py deleted file mode 100644 index bb08e8c2c6d..00000000000 --- a/openpype/hosts/nuke/plugins/create/create_write_still.py +++ /dev/null @@ -1,105 +0,0 @@ -import nuke - -from openpype.hosts.nuke.api import plugin -from openpype.hosts.nuke.api.lib import ( - create_write_node, - create_write_node_legacy, - get_created_node_imageio_setting_legacy -) - -# HACK: just to disable still image on projects which -# are not having anatomy imageio preset for CreateWriteStill -# TODO: remove this code as soon as it will be obsolete -imageio_writes = 
get_created_node_imageio_setting_legacy( - "Write", - "CreateWriteStill", - "stillMain" -) -print(imageio_writes["knobs"]) - - -class CreateWriteStill(plugin.AbstractWriteRender): - # change this to template preset - name = "WriteStillFrame" - label = "Create Write Still Image" - hosts = ["nuke"] - n_class = "Write" - family = "still" - icon = "image" - - # settings - fpath_template = "{work}/render/nuke/{subset}/{subset}.{ext}" - defaults = [ - "ImageFrame", - "MPFrame", - "LayoutFrame" - ] - prenodes = { - "FrameHold01": { - "nodeclass": "FrameHold", - "dependent": None, - "knobs": [ - { - "type": "formatable", - "name": "first_frame", - "template": "{frame}", - "to_type": "number" - } - ] - } - } - - def __init__(self, *args, **kwargs): - super(CreateWriteStill, self).__init__(*args, **kwargs) - - def _create_write_node(self, selected_node, inputs, outputs, write_data): - # add fpath_template - write_data["fpath_template"] = self.fpath_template - - if not self.is_legacy(): - return create_write_node( - self.name, - write_data, - input=selected_node, - review=False, - prenodes=self.prenodes, - farm=False, - linked_knobs=["channels", "___", "first", "last", "use_limit"], - **{ - "frame": nuke.frame() - } - ) - else: - _prenodes = [ - { - "name": "FrameHold01", - "class": "FrameHold", - "knobs": [ - ("first_frame", nuke.frame()) - ], - "dependent": None - } - ] - return create_write_node_legacy( - self.name, - write_data, - input=selected_node, - review=False, - prenodes=_prenodes, - farm=False, - linked_knobs=["channels", "___", "first", "last", "use_limit"] - ) - - def _modify_write_node(self, write_node): - write_node.begin() - for n in nuke.allNodes(): - # get write node - if n.Class() in "Write": - w_node = n - write_node.end() - - w_node["use_limit"].setValue(True) - w_node["first"].setValue(nuke.frame()) - w_node["last"].setValue(nuke.frame()) - - return write_node diff --git a/openpype/hosts/nuke/plugins/create/workfile_creator.py b/openpype/hosts/nuke/plugins/create/workfile_creator.py new file mode 100644 index 00000000000..72ef61e63f2 --- /dev/null +++ b/openpype/hosts/nuke/plugins/create/workfile_creator.py @@ -0,0 +1,69 @@ +import openpype.hosts.nuke.api as api +from openpype.client import get_asset_by_name +from openpype.pipeline import ( + AutoCreator, + CreatedInstance, + legacy_io, +) +from openpype.hosts.nuke.api import ( + INSTANCE_DATA_KNOB, + set_node_data +) +import nuke + + +class WorkfileCreator(AutoCreator): + identifier = "workfile" + family = "workfile" + + default_variant = "Main" + + def get_instance_attr_defs(self): + return [] + + def collect_instances(self): + root_node = nuke.root() + instance_data = api.get_node_data( + root_node, api.INSTANCE_DATA_KNOB + ) + + project_name = legacy_io.Session["AVALON_PROJECT"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] + host_name = legacy_io.Session["AVALON_APP"] + + asset_doc = get_asset_by_name(project_name, asset_name) + subset_name = self.get_subset_name( + self.default_variant, task_name, asset_doc, + project_name, host_name + ) + instance_data.update({ + "asset": asset_name, + "task": task_name, + "variant": self.default_variant + }) + instance_data.update(self.get_dynamic_data( + self.default_variant, task_name, asset_doc, + project_name, host_name, instance_data + )) + + instance = CreatedInstance( + self.family, subset_name, instance_data, self + ) + instance.transient_data["node"] = root_node + self._add_instance_to_context(instance) + + def 
update_instances(self, update_list): + for created_inst, _changes in update_list: + instance_node = created_inst.transient_data["node"] + + set_node_data( + instance_node, + INSTANCE_DATA_KNOB, + created_inst.data_to_store() + ) + + def create(self, options=None): + # no need to create if it is created + # in `collect_instances` + pass diff --git a/openpype/hosts/nuke/plugins/publish/collect_backdrop.py b/openpype/hosts/nuke/plugins/publish/collect_backdrop.py index 4efbb88b8c1..8eaefa68541 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/collect_backdrop.py @@ -1,9 +1,9 @@ +from pprint import pformat import pyblish.api from openpype.hosts.nuke.api import lib as pnlib import nuke -@pyblish.api.log class CollectBackdrops(pyblish.api.InstancePlugin): """Collect Backdrop node instance and its content """ @@ -14,8 +14,9 @@ class CollectBackdrops(pyblish.api.InstancePlugin): families = ["nukenodes"] def process(self, instance): + self.log.debug(pformat(instance.data)) - bckn = instance[0] + bckn = instance.data["transientData"]["node"] # define size of the backdrop left = bckn.xpos() @@ -23,6 +24,7 @@ def process(self, instance): right = left + bckn['bdwidth'].value() bottom = top + bckn['bdheight'].value() + instance.data["transientData"]["childNodes"] = [] # iterate all nodes for node in nuke.allNodes(): @@ -37,17 +39,17 @@ def process(self, instance): and (node.ypos() + node.screenHeight() < bottom): # add contained nodes to instance's node list - instance.append(node) + instance.data["transientData"]["childNodes"].append(node) # get all connections from outside of backdrop - nodes = instance[1:] + nodes = instance.data["transientData"]["childNodes"] connections_in, connections_out = pnlib.get_dependent_nodes(nodes) - instance.data["nodeConnectionsIn"] = connections_in - instance.data["nodeConnectionsOut"] = connections_out + instance.data["transientData"]["nodeConnectionsIn"] = connections_in + instance.data["transientData"]["nodeConnectionsOut"] = connections_out # make label nicer instance.data["label"] = "{0} ({1} nodes)".format( - bckn.name(), len(instance) - 1) + bckn.name(), len(instance.data["transientData"]["childNodes"])) instance.data["families"].append(instance.data["family"]) @@ -83,5 +85,4 @@ def process(self, instance): "frameStart": first_frame, "frameEnd": last_frame }) - self.log.info("Backdrop content collected: `{}`".format(instance[:])) self.log.info("Backdrop instance collected: `{}`".format(instance)) diff --git a/openpype/hosts/nuke/plugins/publish/collect_context_data.py b/openpype/hosts/nuke/plugins/publish/collect_context_data.py new file mode 100644 index 00000000000..5a1cdcf49ec --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/collect_context_data.py @@ -0,0 +1,69 @@ +import os +import nuke +import pyblish.api +import openpype.api as api +import openpype.hosts.nuke.api as napi +from openpype.pipeline import KnownPublishError + + +class CollectContextData(pyblish.api.ContextPlugin): + """Collect current context publish.""" + + order = pyblish.api.CollectorOrder - 0.499 + label = "Collect context data" + hosts = ['nuke'] + + def process(self, context): # sourcery skip: avoid-builtin-shadow + root_node = nuke.root() + + current_file = os.path.normpath(root_node.name()) + + if current_file.lower() == "root": + raise KnownPublishError( + "Workfile is not correct file name. \n" + "Use workfile tool to manage the name correctly." 
+ ) + + # Get frame range + first_frame = int(root_node["first_frame"].getValue()) + last_frame = int(root_node["last_frame"].getValue()) + + # get instance data from root + root_instance_context = napi.get_node_data( + root_node, napi.INSTANCE_DATA_KNOB + ) + + handle_start = root_instance_context["handleStart"] + handle_end = root_instance_context["handleEnd"] + + # Get format + format = root_node['format'].value() + resolution_width = format.width() + resolution_height = format.height() + pixel_aspect = format.pixelAspect() + + script_data = { + "frameStart": first_frame + handle_start, + "frameEnd": last_frame - handle_end, + "resolutionWidth": resolution_width, + "resolutionHeight": resolution_height, + "pixelAspect": pixel_aspect, + + # backward compatibility handles + "handles": handle_start, + "handleStart": handle_start, + "handleEnd": handle_end, + "step": 1, + "fps": root_node['fps'].value(), + + "currentFile": current_file, + "version": int(api.get_version_from_path(current_file)), + + "host": pyblish.api.current_host(), + "hostVersion": nuke.NUKE_VERSION_STRING + } + + context.data["scriptData"] = script_data + context.data.update(script_data) + + self.log.info('Context from Nuke script collected') diff --git a/openpype/hosts/nuke/plugins/publish/collect_gizmo.py b/openpype/hosts/nuke/plugins/publish/collect_gizmo.py index 3db26096ae1..3a877fc194a 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_gizmo.py +++ b/openpype/hosts/nuke/plugins/publish/collect_gizmo.py @@ -2,25 +2,23 @@ import nuke -@pyblish.api.log class CollectGizmo(pyblish.api.InstancePlugin): """Collect Gizmo (group) node instance and its content """ order = pyblish.api.CollectorOrder + 0.22 - label = "Collect Gizmo (Group)" + label = "Collect Gizmo (group)" hosts = ["nuke"] families = ["gizmo"] def process(self, instance): - grpn = instance[0] + gizmo_node = instance.data["transientData"]["node"] # add family to familiess instance.data["families"].insert(0, instance.data["family"]) # make label nicer - instance.data["label"] = "{0} ({1} nodes)".format( - grpn.name(), len(instance) - 1) + instance.data["label"] = gizmo_node.name() # Get frame range handle_start = instance.context.data["handleStart"] @@ -46,5 +44,4 @@ def process(self, instance): "frameStart": first_frame, "frameEnd": last_frame }) - self.log.info("Gizmo content collected: `{}`".format(instance[:])) self.log.info("Gizmo instance collected: `{}`".format(instance)) diff --git a/openpype/hosts/nuke/plugins/publish/collect_instance_data.py b/openpype/hosts/nuke/plugins/publish/collect_instance_data.py new file mode 100644 index 00000000000..3908aef4bcc --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/collect_instance_data.py @@ -0,0 +1,44 @@ +import nuke +import pyblish.api + + +class CollectInstanceData(pyblish.api.InstancePlugin): + """Collect all nodes with Avalon knob.""" + + order = pyblish.api.CollectorOrder - 0.49 + label = "Collect Instance Data" + hosts = ["nuke", "nukeassist"] + + # presets + sync_workfile_version_on_families = [] + + def process(self, instance): + family = instance.data["family"] + + # Get format + root = nuke.root() + format_ = root['format'].value() + resolution_width = format_.width() + resolution_height = format_.height() + pixel_aspect = format_.pixelAspect() + + # sync workfile version + if family in self.sync_workfile_version_on_families: + self.log.debug( + "Syncing version with workfile for '{}'".format( + family + ) + ) + # get version to instance for integration + instance.data['version'] = 
instance.context.data['version'] + + instance.data.update({ + "step": 1, + "fps": root['fps'].value(), + "resolutionWidth": resolution_width, + "resolutionHeight": resolution_height, + "pixelAspect": pixel_aspect + + }) + self.log.debug("Collected instance: {}".format( + instance.data)) diff --git a/openpype/hosts/nuke/plugins/publish/collect_model.py b/openpype/hosts/nuke/plugins/publish/collect_model.py index 5fca240553a..9da056052be 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_model.py +++ b/openpype/hosts/nuke/plugins/publish/collect_model.py @@ -2,7 +2,6 @@ import nuke -@pyblish.api.log class CollectModel(pyblish.api.InstancePlugin): """Collect Model node instance and its content """ @@ -14,12 +13,12 @@ class CollectModel(pyblish.api.InstancePlugin): def process(self, instance): - grpn = instance[0] + geo_node = instance.data["transientData"]["node"] # add family to familiess instance.data["families"].insert(0, instance.data["family"]) # make label nicer - instance.data["label"] = grpn.name() + instance.data["label"] = geo_node.name() # Get frame range handle_start = instance.context.data["handleStart"] @@ -45,5 +44,4 @@ def process(self, instance): "frameStart": first_frame, "frameEnd": last_frame }) - self.log.info("Model content collected: `{}`".format(instance[:])) self.log.info("Model instance collected: `{}`".format(instance)) diff --git a/openpype/hosts/nuke/plugins/publish/collect_reads.py b/openpype/hosts/nuke/plugins/publish/collect_reads.py index b79d9646d59..a1144fbcc31 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_reads.py +++ b/openpype/hosts/nuke/plugins/publish/collect_reads.py @@ -2,12 +2,10 @@ import re import nuke import pyblish.api - from openpype.client import get_asset_by_name from openpype.pipeline import legacy_io -@pyblish.api.log class CollectNukeReads(pyblish.api.InstancePlugin): """Collect all read nodes.""" @@ -17,6 +15,8 @@ class CollectNukeReads(pyblish.api.InstancePlugin): families = ["source"] def process(self, instance): + node = instance.data["transientData"]["node"] + project_name = legacy_io.active_project() asset_name = legacy_io.Session["AVALON_ASSET"] asset_doc = get_asset_by_name(project_name, asset_name) @@ -25,7 +25,6 @@ def process(self, instance): self.log.debug("checking instance: {}".format(instance)) - node = instance[0] if node.Class() != "Read": return @@ -99,10 +98,7 @@ def process(self, instance): } instance.data["representations"].append(representation) - transfer = False - if "publish" in node.knobs(): - transfer = node["publish"] - + transfer = node["publish"] if "publish" in node.knobs() else False instance.data['transfer'] = transfer # Add version data to instance diff --git a/openpype/hosts/nuke/plugins/publish/collect_slate_node.py b/openpype/hosts/nuke/plugins/publish/collect_slate_node.py index bfe32d8fd1b..57010876977 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_slate_node.py +++ b/openpype/hosts/nuke/plugins/publish/collect_slate_node.py @@ -8,10 +8,10 @@ class CollectSlate(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder + 0.09 label = "Collect Slate Node" hosts = ["nuke"] - families = ["render", "render.local", "render.farm"] + families = ["render"] def process(self, instance): - node = instance[0] + node = instance.data["transientData"]["node"] slate = next((n for n in nuke.allNodes() if "slate" in n.name().lower() @@ -35,7 +35,6 @@ def process(self, instance): instance.data["slateNode"] = slate_node instance.data["slate"] = True instance.data["families"].append("slate") 
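The instance[0] to instance.data["transientData"]["node"] switch above repeats through every collector in this patch, since creators no longer append nodes to the pyblish instance itself. A minimal collector written against that convention; the plugin name, order offset, and family here are illustrative only:

import pyblish.api


class CollectTransientNodeExample(pyblish.api.InstancePlugin):
    """Illustrative only: read the creator's node from transient data."""

    order = pyblish.api.CollectorOrder + 0.1
    label = "Collect Transient Node (example)"
    hosts = ["nuke"]
    families = ["render"]

    def process(self, instance):
        # the creator stored the Nuke node here instead of appending it
        # to the instance, as the removed legacy plugins did
        node = instance.data["transientData"]["node"]
        instance.data["label"] = node.name()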
- instance.data["versionData"]["families"].append("slate") self.log.info( "Slate node is in node graph: `{}`".format(slate.name())) self.log.debug( diff --git a/openpype/hosts/nuke/plugins/publish/collect_workfile.py b/openpype/hosts/nuke/plugins/publish/collect_workfile.py new file mode 100644 index 00000000000..852042e6e95 --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/collect_workfile.py @@ -0,0 +1,40 @@ +import os +import nuke +import pyblish.api + + +class CollectWorkfile(pyblish.api.InstancePlugin): + """Collect current script for publish.""" + + order = pyblish.api.CollectorOrder + label = "Collect Workfile" + hosts = ['nuke'] + families = ["workfile"] + + def process(self, instance): # sourcery skip: avoid-builtin-shadow + + script_data = instance.context.data["scriptData"] + current_file = os.path.normpath(nuke.root().name()) + + # creating instances per write node + staging_dir = os.path.dirname(current_file) + base_name = os.path.basename(current_file) + + # creating representation + representation = { + 'name': 'nk', + 'ext': 'nk', + 'files': base_name, + "stagingDir": staging_dir, + } + + # creating instance data + instance.data.update({ + "name": base_name, + "representations": [representation] + }) + + # adding basic script data + instance.data.update(script_data) + + self.log.info("Collect script version") diff --git a/openpype/hosts/nuke/plugins/publish/collect_writes.py b/openpype/hosts/nuke/plugins/publish/collect_writes.py new file mode 100644 index 00000000000..3054e5a30c5 --- /dev/null +++ b/openpype/hosts/nuke/plugins/publish/collect_writes.py @@ -0,0 +1,186 @@ +import os +from pprint import pformat +import nuke +import pyblish.api +from openpype.hosts.nuke import api as napi + + +class CollectNukeWrites(pyblish.api.InstancePlugin): + """Collect all write nodes.""" + + order = pyblish.api.CollectorOrder - 0.48 + label = "Collect Writes" + hosts = ["nuke", "nukeassist"] + families = ["render", "prerender", "image"] + + def process(self, instance): + self.log.debug(pformat(instance.data)) + creator_attributes = instance.data["creator_attributes"] + instance.data.update(creator_attributes) + + group_node = instance.data["transientData"]["node"] + render_target = instance.data["render_target"] + family = instance.data["family"] + families = instance.data["families"] + + # add targeted family to families + instance.data["families"].append( + "{}.{}".format(family, render_target) + ) + if instance.data.get("review"): + instance.data["families"].append("review") + + child_nodes = napi.get_instance_group_node_childs(instance) + instance.data["transientData"]["childNodes"] = child_nodes + + write_node = None + for x in child_nodes: + if x.Class() == "Write": + write_node = x + + if write_node is None: + self.log.warning( + "Created node '{}' is missing write node!".format( + group_node.name() + ) + ) + return + + instance.data["writeNode"] = write_node + self.log.debug("checking instance: {}".format(instance)) + + # Determine defined file type + ext = write_node["file_type"].value() + + # Get frame range + handle_start = instance.context.data["handleStart"] + handle_end = instance.context.data["handleEnd"] + first_frame = int(nuke.root()["first_frame"].getValue()) + last_frame = int(nuke.root()["last_frame"].getValue()) + frame_length = int(last_frame - first_frame + 1) + + if write_node["use_limit"].getValue(): + first_frame = int(write_node["first"].getValue()) + last_frame = int(write_node["last"].getValue()) + + write_file_path = nuke.filename(write_node) + 
output_dir = os.path.dirname(write_file_path) + + self.log.debug('output dir: {}'.format(output_dir)) + + if render_target == "frames": + representation = { + 'name': ext, + 'ext': ext, + "stagingDir": output_dir, + "tags": [] + } + + # get file path knob + node_file_knob = write_node["file"] + # list file paths based on input frames + expected_paths = list(sorted({ + node_file_knob.evaluate(frame) + for frame in range(first_frame, last_frame + 1) + })) + + # convert only to base names + expected_filenames = [ + os.path.basename(filepath) + for filepath in expected_paths + ] + + # make sure files are existing at folder + collected_frames = [ + filename + for filename in os.listdir(output_dir) + if filename in expected_filenames + ] + + if collected_frames: + collected_frames_len = len(collected_frames) + frame_start_str = "%0{}d".format( + len(str(last_frame))) % first_frame + representation['frameStart'] = frame_start_str + + # in case slate is expected and not yet rendered + self.log.debug("_ frame_length: {}".format(frame_length)) + self.log.debug("_ collected_frames_len: {}".format( + collected_frames_len)) + + # this will only run if slate frame is not already + # rendered from previews publishes + if ( + "slate" in families + and frame_length == collected_frames_len + and family == "render" + ): + frame_slate_str = ( + "{{:0{}d}}".format(len(str(last_frame))) + ).format(first_frame - 1) + + slate_frame = collected_frames[0].replace( + frame_start_str, frame_slate_str) + collected_frames.insert(0, slate_frame) + + if collected_frames_len == 1: + representation['files'] = collected_frames.pop() + else: + representation['files'] = collected_frames + + instance.data["representations"].append(representation) + self.log.info("Publishing rendered frames ...") + + elif render_target == "farm": + farm_priority = creator_attributes.get("farm_priority") + farm_chunk = creator_attributes.get("farm_chunk") + farm_concurency = creator_attributes.get("farm_concurency") + instance.data.update({ + "deadlineChunkSize": farm_chunk or 1, + "deadlinePriority": farm_priority or 50, + "deadlineConcurrentTasks": farm_concurency or 0 + }) + # Farm rendering + instance.data["transfer"] = False + instance.data["farm"] = True + self.log.info("Farm rendering ON ...") + + # get colorspace and add to version data + colorspace = napi.get_colorspace_from_node(write_node) + version_data = { + "colorspace": colorspace + } + + instance.data.update({ + "versionData": version_data, + "path": write_file_path, + "outputDir": output_dir, + "ext": ext, + "colorspace": colorspace + }) + + if family == "render": + instance.data.update({ + "handleStart": handle_start, + "handleEnd": handle_end, + "frameStart": first_frame + handle_start, + "frameEnd": last_frame - handle_end, + "frameStartHandle": first_frame, + "frameEndHandle": last_frame, + }) + else: + instance.data.update({ + "handleStart": 0, + "handleEnd": 0, + "frameStart": first_frame, + "frameEnd": last_frame, + "frameStartHandle": first_frame, + "frameEndHandle": last_frame, + }) + + # make sure rendered sequence on farm will + # be used for exctract review + if not instance.data["review"]: + instance.data["useSequenceForReview"] = False + + self.log.debug("instance.data: {}".format(pformat(instance.data))) diff --git a/openpype/hosts/nuke/plugins/publish/extract_backdrop.py b/openpype/hosts/nuke/plugins/publish/extract_backdrop.py index d1e5c4cc5ab..5166fa4b2c0 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_backdrop.py +++ 
b/openpype/hosts/nuke/plugins/publish/extract_backdrop.py @@ -26,8 +26,14 @@ class ExtractBackdropNode(publish.Extractor): families = ["nukenodes"] def process(self, instance): - tmp_nodes = list() - nodes = instance[1:] + tmp_nodes = [] + child_nodes = instance.data["transientData"]["childNodes"] + # all connections outside of backdrop + connections_in = instance.data["transientData"]["nodeConnectionsIn"] + connections_out = instance.data["transientData"]["nodeConnectionsOut"] + self.log.debug("_ connections_in: `{}`".format(connections_in)) + self.log.debug("_ connections_out: `{}`".format(connections_out)) + # Define extract output file path stagingdir = self.staging_dir(instance) filename = "{0}.nk".format(instance.name) @@ -35,20 +41,14 @@ def process(self, instance): # maintain selection with maintained_selection(): - # all connections outside of backdrop - connections_in = instance.data["nodeConnectionsIn"] - connections_out = instance.data["nodeConnectionsOut"] - self.log.debug("_ connections_in: `{}`".format(connections_in)) - self.log.debug("_ connections_out: `{}`".format(connections_out)) - - # create input nodes and name them as passing node (*_INP) + # create input child_nodes and name them as passing node (*_INP) for n, inputs in connections_in.items(): for i, input in inputs: inpn = nuke.createNode("Input") inpn["name"].setValue("{}_{}_INP".format(n.name(), i)) n.setInput(i, inpn) inpn.setXYpos(input.xpos(), input.ypos()) - nodes.append(inpn) + child_nodes.append(inpn) tmp_nodes.append(inpn) reset_selection() @@ -63,13 +63,13 @@ def process(self, instance): if d.name() in n.name()), 0), opn) opn.setInput(0, n) opn.autoplace() - nodes.append(opn) + child_nodes.append(opn) tmp_nodes.append(opn) reset_selection() - # select nodes to copy + # select child_nodes to copy reset_selection() - select_nodes(nodes) + select_nodes(child_nodes) # create tmp nk file # save file to the path nuke.nodeCopy(path) @@ -104,6 +104,3 @@ def process(self, instance): self.log.info("Extracted instance '{}' to: {}".format( instance.name, path)) - - self.log.info("Data {}".format( - instance.data)) diff --git a/openpype/hosts/nuke/plugins/publish/extract_camera.py b/openpype/hosts/nuke/plugins/publish/extract_camera.py index b751bfab039..4286f71e834 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_camera.py +++ b/openpype/hosts/nuke/plugins/publish/extract_camera.py @@ -28,6 +28,7 @@ class ExtractCamera(publish.Extractor): ] def process(self, instance): + camera_node = instance.data["transientData"]["node"] handle_start = instance.context.data["handleStart"] handle_end = instance.context.data["handleEnd"] first_frame = int(nuke.root()["first_frame"].getValue()) @@ -38,7 +39,7 @@ def process(self, instance): self.log.info("instance.data: `{}`".format( pformat(instance.data))) - rm_nodes = list() + rm_nodes = [] self.log.info("Crating additional nodes") subset = instance.data["subset"] staging_dir = self.staging_dir(instance) @@ -58,7 +59,7 @@ def process(self, instance): with maintained_selection(): # bake camera with axeses onto word coordinate XYZ rm_n = bakeCameraWithAxeses( - nuke.toNode(instance.data["name"]), output_range) + camera_node, output_range) rm_nodes.append(rm_n) # create scene node diff --git a/openpype/hosts/nuke/plugins/publish/extract_gizmo.py b/openpype/hosts/nuke/plugins/publish/extract_gizmo.py index 3047ad67242..b0b1a9f7b78 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_gizmo.py +++ b/openpype/hosts/nuke/plugins/publish/extract_gizmo.py @@ -19,13 +19,14 @@ 
class ExtractGizmo(publish.Extractor): """ order = pyblish.api.ExtractorOrder - label = "Extract Gizmo (Group)" + label = "Extract Gizmo (group)" hosts = ["nuke"] families = ["gizmo"] def process(self, instance): - tmp_nodes = list() - orig_grpn = instance[0] + tmp_nodes = [] + orig_grpn = instance.data["transientData"]["node"] + # Define extract output file path stagingdir = self.staging_dir(instance) filename = "{0}.nk".format(instance.name) @@ -54,15 +55,6 @@ def process(self, instance): # convert gizmos to groups pnutils.bake_gizmos_recursively(copy_grpn) - # remove avalonknobs - knobs = copy_grpn.knobs() - avalon_knobs = [k for k in knobs.keys() - for ak in ["avalon:", "ak:"] - if ak in k] - avalon_knobs.append("publish") - for ak in avalon_knobs: - copy_grpn.removeKnob(knobs[ak]) - # add to temporary nodes tmp_nodes.append(copy_grpn) diff --git a/openpype/hosts/nuke/plugins/publish/extract_model.py b/openpype/hosts/nuke/plugins/publish/extract_model.py index d82cb3110b7..814d4041375 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_model.py +++ b/openpype/hosts/nuke/plugins/publish/extract_model.py @@ -36,8 +36,9 @@ def process(self, instance): self.log.info("instance.data: `{}`".format( pformat(instance.data))) - rm_nodes = list() - model_node = instance[0] + rm_nodes = [] + model_node = instance.data["transientData"]["node"] + self.log.info("Crating additional nodes") subset = instance.data["subset"] staging_dir = self.staging_dir(instance) diff --git a/openpype/hosts/nuke/plugins/publish/extract_ouput_node.py b/openpype/hosts/nuke/plugins/publish/extract_ouput_node.py index eb9bc0b4290..e66cfd9018e 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_ouput_node.py +++ b/openpype/hosts/nuke/plugins/publish/extract_ouput_node.py @@ -16,13 +16,17 @@ class CreateOutputNode(pyblish.api.ContextPlugin): def process(self, context): # capture selection state with maintained_selection(): - active_node = [node for inst in context - for node in inst - if "ak:family" in node.knobs()] + + active_node = [ + inst.data.get("transientData", {}).get("node") + for inst in context + if inst.data.get("transientData", {}).get("node") + if inst.data.get( + "transientData", {}).get("node").Class() != "Root" + ] if active_node: - self.log.info(active_node) - active_node = active_node[0] + active_node = active_node.pop() self.log.info(active_node) active_node['selected'].setValue(True) diff --git a/openpype/hosts/nuke/plugins/publish/extract_render_local.py b/openpype/hosts/nuke/plugins/publish/extract_render_local.py index 843d5887866..811b2d4ffbb 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_render_local.py +++ b/openpype/hosts/nuke/plugins/publish/extract_render_local.py @@ -23,9 +23,13 @@ class NukeRenderLocal(publish.Extractor): def process(self, instance): families = instance.data["families"] + child_nodes = ( + instance.data.get("transientData", {}).get("childNodes") + or instance + ) node = None - for x in instance: + for x in child_nodes: if x.Class() == "Write": node = x diff --git a/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py b/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py index 19eae9638b9..a1a0e241c07 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/nuke/plugins/publish/extract_thumbnail.py @@ -4,11 +4,7 @@ import pyblish.api from openpype.pipeline import publish -from openpype.hosts.nuke.api import ( - maintained_selection, - get_view_process_node -) - +from openpype.hosts.nuke import api as napi if 
sys.version_info[0] >= 3: unicode = str @@ -38,7 +34,7 @@ def process(self, instance): if "render.farm" in instance.data["families"]: return - with maintained_selection(): + with napi.maintained_selection(): self.log.debug("instance: {}".format(instance)) self.log.debug("instance.data[families]: {}".format( instance.data["families"])) @@ -69,7 +65,7 @@ def render_thumbnail(self, instance, output_name=None, **kwargs): bake_viewer_input_process_node = kwargs[ "bake_viewer_input_process"] - node = instance[0] # group node + node = instance.data["transientData"]["node"] # group node self.log.info("Creating staging dir...") if "representations" not in instance.data: @@ -144,7 +140,7 @@ def render_thumbnail(self, instance, output_name=None, **kwargs): if bake_viewer_process: if bake_viewer_input_process_node: # get input process and connect it to baking - ipn = get_view_process_node() + ipn = napi.get_view_process_node() if ipn is not None: ipn.setInput(0, previous_node) previous_node = ipn diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml b/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml index 1097909a5fa..0422917e9c4 100644 --- a/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml +++ b/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml @@ -1,7 +1,7 @@ - Shot/Asset mame + Shot/Asset name ## Invalid Shot/Asset name in subset diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml b/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml index cdf85102bcc..1717622a456 100644 --- a/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml +++ b/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml @@ -3,16 +3,30 @@ Knobs values -## Invalid node's knobs values + ## Invalid node's knobs values -Following write node knobs needs to be repaired: + Following write node knobs needs to be repaired: -{xml_msg} + {xml_msg} -### How to repair? + ### How to repair? -1. Use Repair button. -2. Hit Reload button on the publisher. + 1. Use Repair button. + 2. Hit Reload button on the publisher. + + + + Legacy knob types + + ## Knobs are in obsolete configuration + + Settings needs to be fixed. + + ### How to repair? + + Contact your supervisor or fix it in project settings at + 'project_settings/nuke/imageio/nodes/requiredNodes' at knobs. + Each '__legacy__' type has to be defined accordingly to its type. 
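The three legacy pre-collect plugins removed below scanned every node for avalon knobs on each publish. With this patch that discovery happens once, in the creators' collect_instances(), by reading the hidden JSON knob. A rough sketch of that discovery loop, using only helpers exported by openpype.hosts.nuke.api; the actual implementation in the creator base classes may differ:

import nuke
from openpype.hosts.nuke.api import INSTANCE_DATA_KNOB, get_node_data


def iter_publish_instances():
    """Yield (node, payload) pairs for nodes carrying publish metadata."""
    for node in nuke.allNodes():
        payload = get_node_data(node, INSTANCE_DATA_KNOB)
        if not payload:
            continue
        yield node, payload


# each creator then claims the payloads matching its own
# "creator_identifier" and rebuilds CreatedInstance objects from them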
\ No newline at end of file diff --git a/openpype/hosts/nuke/plugins/publish/precollect_instances.py b/openpype/hosts/nuke/plugins/publish/precollect_instances.py deleted file mode 100644 index b396056eb98..00000000000 --- a/openpype/hosts/nuke/plugins/publish/precollect_instances.py +++ /dev/null @@ -1,158 +0,0 @@ -import nuke -import pyblish.api - -from openpype.hosts.nuke.api.lib import ( - add_publish_knob, - get_avalon_knob_data -) - - -@pyblish.api.log -class PreCollectNukeInstances(pyblish.api.ContextPlugin): - """Collect all nodes with Avalon knob.""" - - order = pyblish.api.CollectorOrder - 0.49 - label = "Pre-collect Instances" - hosts = ["nuke", "nukeassist"] - - # presets - sync_workfile_version_on_families = [] - - def process(self, context): - instances = [] - - root = nuke.root() - - self.log.debug("nuke.allNodes(): {}".format(nuke.allNodes())) - for node in nuke.allNodes(): - - if node.Class() in ["Viewer", "Dot"]: - continue - - try: - if node["disable"].value(): - continue - except Exception as E: - self.log.warning(E) - - # get data from avalon knob - avalon_knob_data = get_avalon_knob_data( - node, ["avalon:", "ak:"]) - - self.log.debug("avalon_knob_data: {}".format(avalon_knob_data)) - - if not avalon_knob_data: - continue - - if avalon_knob_data["id"] != "pyblish.avalon.instance": - continue - - # establish families - family = avalon_knob_data["family"] - families_ak = avalon_knob_data.get("families", []) - families = [] - - # except disabled nodes but exclude backdrops in test - if ("nukenodes" not in family) and (node["disable"].value()): - continue - - subset = avalon_knob_data.get( - "subset", None) or node["name"].value() - - # Create instance - instance = context.create_instance(subset) - instance.append(node) - - suspend_publish = False - if "suspend_publish" in node.knobs(): - suspend_publish = node["suspend_publish"].value() - instance.data["suspend_publish"] = suspend_publish - - # get review knob value - review = False - if "review" in node.knobs(): - review = node["review"].value() - - if review: - families.append("review") - - # Add all nodes in group instances. 
- if node.Class() == "Group": - # only alter families for render family - if families_ak and "write" in families_ak.lower(): - target = node["render"].value() - if target == "Use existing frames": - # Local rendering - self.log.info("flagged for no render") - families.append(families_ak.lower()) - elif target == "Local": - # Local rendering - self.log.info("flagged for local render") - families.append("{}.local".format(family)) - family = families_ak.lower() - elif target == "On farm": - # Farm rendering - self.log.info("flagged for farm render") - instance.data["transfer"] = False - instance.data["farm"] = True - families.append("{}.farm".format(family)) - family = families_ak.lower() - - node.begin() - for i in nuke.allNodes(): - instance.append(i) - node.end() - - if not families and families_ak and family not in [ - "render", "prerender"]: - families.append(families_ak.lower()) - - self.log.debug("__ family: `{}`".format(family)) - self.log.debug("__ families: `{}`".format(families)) - - # Get format - format_ = root['format'].value() - resolution_width = format_.width() - resolution_height = format_.height() - pixel_aspect = format_.pixelAspect() - - # get publish knob value - if "publish" not in node.knobs(): - add_publish_knob(node) - - # sync workfile version - _families_test = [family] + families - self.log.debug("__ _families_test: `{}`".format(_families_test)) - for family_test in _families_test: - if family_test in self.sync_workfile_version_on_families: - self.log.debug( - "Syncing version with workfile for '{}'".format( - family_test - ) - ) - # get version to instance for integration - instance.data['version'] = instance.context.data['version'] - - instance.data.update({ - "subset": subset, - "asset": avalon_knob_data["asset"], - "label": node.name(), - "name": node.name(), - "subset": subset, - "family": family, - "families": families, - "avalonKnob": avalon_knob_data, - "step": 1, - "publish": node.knob('publish').value(), - "fps": nuke.root()['fps'].value(), - "resolutionWidth": resolution_width, - "resolutionHeight": resolution_height, - "pixelAspect": pixel_aspect, - "review": review, - "representations": [] - - }) - self.log.info("collected instance: {}".format(instance.data)) - instances.append(instance) - - self.log.debug("context: {}".format(context)) diff --git a/openpype/hosts/nuke/plugins/publish/precollect_workfile.py b/openpype/hosts/nuke/plugins/publish/precollect_workfile.py deleted file mode 100644 index 316c651b66e..00000000000 --- a/openpype/hosts/nuke/plugins/publish/precollect_workfile.py +++ /dev/null @@ -1,107 +0,0 @@ -import os - -import nuke - -import pyblish.api - -from openpype.lib import get_version_from_path -from openpype.hosts.nuke.api.lib import ( - add_publish_knob, - get_avalon_knob_data -) -from openpype.pipeline import KnownPublishError - - -class CollectWorkfile(pyblish.api.ContextPlugin): - """Collect current script for publish.""" - - order = pyblish.api.CollectorOrder - 0.50 - label = "Pre-collect Workfile" - hosts = ['nuke'] - - def process(self, context): # sourcery skip: avoid-builtin-shadow - root = nuke.root() - - current_file = os.path.normpath(nuke.root().name()) - - if current_file.lower() == "root": - raise KnownPublishError( - "Workfile is not correct file name. \n" - "Use workfile tool to manage the name correctly." 
- ) - - knob_data = get_avalon_knob_data(root) - - add_publish_knob(root) - - family = "workfile" - task = os.getenv("AVALON_TASK", None) - # creating instances per write node - staging_dir = os.path.dirname(current_file) - base_name = os.path.basename(current_file) - subset = family + task.capitalize() - - # Get frame range - first_frame = int(root["first_frame"].getValue()) - last_frame = int(root["last_frame"].getValue()) - - handle_start = int(knob_data.get("handleStart", 0)) - handle_end = int(knob_data.get("handleEnd", 0)) - - # Get format - format = root['format'].value() - resolution_width = format.width() - resolution_height = format.height() - pixel_aspect = format.pixelAspect() - - # Create instance - instance = context.create_instance(subset) - instance.add(root) - - script_data = { - "asset": os.getenv("AVALON_ASSET", None), - "frameStart": first_frame + handle_start, - "frameEnd": last_frame - handle_end, - "resolutionWidth": resolution_width, - "resolutionHeight": resolution_height, - "pixelAspect": pixel_aspect, - - # backward compatibility - "handles": handle_start, - - "handleStart": handle_start, - "handleEnd": handle_end, - "step": 1, - "fps": root['fps'].value(), - - "currentFile": current_file, - "version": int(get_version_from_path(current_file)), - - "host": pyblish.api.current_host(), - "hostVersion": nuke.NUKE_VERSION_STRING - } - context.data.update(script_data) - - # creating representation - representation = { - 'name': 'nk', - 'ext': 'nk', - 'files': base_name, - "stagingDir": staging_dir, - } - - # creating instance data - instance.data.update({ - "subset": subset, - "label": base_name, - "name": base_name, - "publish": root.knob('publish').value(), - "family": family, - "families": [family], - "representations": [representation] - }) - - # adding basic script data - instance.data.update(script_data) - - self.log.info('Publishing script version') diff --git a/openpype/hosts/nuke/plugins/publish/precollect_writes.py b/openpype/hosts/nuke/plugins/publish/precollect_writes.py deleted file mode 100644 index 17c4bc30cfa..00000000000 --- a/openpype/hosts/nuke/plugins/publish/precollect_writes.py +++ /dev/null @@ -1,207 +0,0 @@ -import os -import re -from pprint import pformat -import nuke -import pyblish.api - -from openpype.client import ( - get_last_version_by_subset_name, - get_representations, -) -from openpype.pipeline import ( - legacy_io, - get_representation_path, -) - - -@pyblish.api.log -class CollectNukeWrites(pyblish.api.InstancePlugin): - """Collect all write nodes.""" - - order = pyblish.api.CollectorOrder - 0.48 - label = "Pre-collect Writes" - hosts = ["nuke", "nukeassist"] - families = ["write"] - - def process(self, instance): - _families_test = [instance.data["family"]] + instance.data["families"] - self.log.debug("_families_test: {}".format(_families_test)) - - node = None - for x in instance: - if x.Class() == "Write": - node = x - - if node is None: - return - - instance.data["writeNode"] = node - self.log.debug("checking instance: {}".format(instance)) - - # Determine defined file type - ext = node["file_type"].value() - - # Determine output type - output_type = "img" - if ext == "mov": - output_type = "mov" - - # Get frame range - handle_start = instance.context.data["handleStart"] - handle_end = instance.context.data["handleEnd"] - first_frame = int(nuke.root()["first_frame"].getValue()) - last_frame = int(nuke.root()["last_frame"].getValue()) - frame_length = int(last_frame - first_frame + 1) - - if node["use_limit"].getValue(): - 
first_frame = int(node["first"].getValue()) - last_frame = int(node["last"].getValue()) - - # Prepare expected output paths by evaluating each frame of write node - # - paths are first collected to set to avoid duplicated paths, then - # sorted and converted to list - node_file = node["file"] - expected_paths = list(sorted({ - node_file.evaluate(frame) - for frame in range(first_frame, last_frame + 1) - })) - expected_filenames = [ - os.path.basename(filepath) - for filepath in expected_paths - ] - path = nuke.filename(node) - output_dir = os.path.dirname(path) - - self.log.debug('output dir: {}'.format(output_dir)) - - # create label - name = node.name() - # Include start and end render frame in label - label = "{0} ({1}-{2})".format( - name, - int(first_frame), - int(last_frame) - ) - - if [fm for fm in _families_test - if fm in ["render", "prerender", "still"]]: - if "representations" not in instance.data: - instance.data["representations"] = list() - - representation = { - 'name': ext, - 'ext': ext, - "stagingDir": output_dir, - "tags": list() - } - - try: - collected_frames = [ - filename - for filename in os.listdir(output_dir) - if filename in expected_filenames - ] - if collected_frames: - collected_frames_len = len(collected_frames) - frame_start_str = "%0{}d".format( - len(str(last_frame))) % first_frame - representation['frameStart'] = frame_start_str - - # in case slate is expected and not yet rendered - self.log.debug("_ frame_length: {}".format(frame_length)) - self.log.debug( - "_ collected_frames_len: {}".format( - collected_frames_len)) - # this will only run if slate frame is not already - # rendered from previews publishes - if "slate" in _families_test \ - and (frame_length == collected_frames_len) \ - and ("prerender" not in _families_test): - frame_slate_str = "%0{}d".format( - len(str(last_frame))) % (first_frame - 1) - slate_frame = collected_frames[0].replace( - frame_start_str, frame_slate_str) - collected_frames.insert(0, slate_frame) - - if collected_frames_len == 1: - representation['files'] = collected_frames.pop() - if "still" in _families_test: - instance.data['family'] = 'image' - instance.data["families"].remove('still') - else: - representation['files'] = collected_frames - instance.data["representations"].append(representation) - except Exception: - instance.data["representations"].append(representation) - self.log.debug("couldn't collect frames: {}".format(label)) - - # Add version data to instance - colorspace = node["colorspace"].value() - - # remove default part of the string - if "default (" in colorspace: - colorspace = re.sub(r"default.\(|\)", "", colorspace) - self.log.debug("colorspace: `{}`".format(colorspace)) - - version_data = { - "families": [ - _f.replace(".local", "").replace(".farm", "") - for _f in _families_test if "write" != _f - ], - "colorspace": colorspace - } - - group_node = [x for x in instance if x.Class() == "Group"][0] - dl_chunk_size = 1 - if "deadlineChunkSize" in group_node.knobs(): - dl_chunk_size = group_node["deadlineChunkSize"].value() - - dl_priority = 50 - if "deadlinePriority" in group_node.knobs(): - dl_priority = group_node["deadlinePriority"].value() - - dl_concurrent_tasks = 0 - if "deadlineConcurrentTasks" in group_node.knobs(): - dl_concurrent_tasks = group_node["deadlineConcurrentTasks"].value() - - instance.data.update({ - "versionData": version_data, - "path": path, - "outputDir": output_dir, - "ext": ext, - "label": label, - "outputType": output_type, - "colorspace": colorspace, - "deadlineChunkSize": 
dl_chunk_size, - "deadlinePriority": dl_priority, - "deadlineConcurrentTasks": dl_concurrent_tasks - }) - - if self.is_prerender(_families_test): - instance.data.update({ - "handleStart": 0, - "handleEnd": 0, - "frameStart": first_frame, - "frameEnd": last_frame, - "frameStartHandle": first_frame, - "frameEndHandle": last_frame, - }) - else: - instance.data.update({ - "handleStart": handle_start, - "handleEnd": handle_end, - "frameStart": first_frame + handle_start, - "frameEnd": last_frame - handle_end, - "frameStartHandle": first_frame, - "frameEndHandle": last_frame, - }) - - # make sure rendered sequence on farm will - # be used for exctract review - if not instance.data["review"]: - instance.data["useSequenceForReview"] = False - - self.log.debug("instance.data: {}".format(pformat(instance.data))) - - def is_prerender(self, families): - return next((f for f in families if "prerender" in f), None) diff --git a/openpype/hosts/nuke/plugins/publish/validate_asset_name.py b/openpype/hosts/nuke/plugins/publish/validate_asset_name.py index 52731140ffe..f6822bee45f 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_asset_name.py +++ b/openpype/hosts/nuke/plugins/publish/validate_asset_name.py @@ -2,11 +2,10 @@ """Validate if instance asset is the same as context asset.""" from __future__ import absolute_import -import nuke import pyblish.api import openpype.hosts.nuke.api.lib as nlib -import openpype.hosts.nuke.api as nuke_api + from openpype.pipeline.publish import ( ValidateContentsOrder, PublishXmlValidationError, @@ -51,9 +50,10 @@ def process(self, context, plugin): self.deselect() def select(self, instances): - nlib.select_nodes( - [nuke.toNode(str(x)) for x in instances] - ) + for inst in instances: + if inst.data.get("transientData", {}).get("node"): + select_node = inst.data["transientData"]["node"] + select_node["selected"].setValue(True) def deselect(self): nlib.reset_selection() @@ -82,13 +82,14 @@ def process(self, context, plugin): # Apply pyblish.logic to get the instances for the plug-in instances = pyblish.api.instances_by_plugin(failed, plugin) + self.log.debug(instances) context_asset = context.data["assetEntity"]["name"] for instance in instances: - origin_node = instance[0] - nuke_api.lib.recreate_instance( - origin_node, avalon_data={"asset": context_asset} - ) + node = instance.data["transientData"]["node"] + node_data = nlib.get_node_data(node, nlib.INSTANCE_DATA_KNOB) + node_data["asset"] = context_asset + nlib.set_node_data(node, nlib.INSTANCE_DATA_KNOB, node_data) class ValidateCorrectAssetName(pyblish.api.InstancePlugin): @@ -112,6 +113,7 @@ class ValidateCorrectAssetName(pyblish.api.InstancePlugin): def process(self, instance): asset = instance.data.get("asset") context_asset = instance.context.data["assetEntity"]["name"] + node = instance.data["transientData"]["node"] msg = ( "Instance `{}` has wrong shot/asset name:\n" @@ -123,7 +125,7 @@ def process(self, instance): if asset != context_asset: raise PublishXmlValidationError( self, msg, formatting_data={ - "node_name": instance[0]["name"].value(), + "node_name": node.name(), "wrong_name": asset, "correct_name": context_asset } diff --git a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py index 17dc79dc568..208d4a24985 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py +++ b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py @@ -1,6 +1,6 @@ import nuke import pyblish -from openpype.hosts.nuke.api.lib import 
maintained_selection +from openpype.hosts.nuke import api as napi from openpype.pipeline import PublishXmlValidationError @@ -25,14 +25,14 @@ def process(self, context, plugin): # Apply pyblish.logic to get the instances for the plug-in instances = pyblish.api.instances_by_plugin(failed, plugin) - all_xC = list() - all_yC = list() + all_xC = [] + all_yC = [] # maintain selection - with maintained_selection(): + with napi.maintained_selection(): # collect all failed nodes xpos and ypos for instance in instances: - bdn = instance[0] + bdn = instance.data["transientData"]["node"] xC = bdn.xpos() + bdn.screenWidth() / 2 yC = bdn.ypos() + bdn.screenHeight() / 2 @@ -46,7 +46,6 @@ def process(self, context, plugin): nuke.zoom(2, [min(all_xC), min(all_yC)]) -@pyblish.api.log class ValidateBackdrop(pyblish.api.InstancePlugin): """ Validate amount of nodes on backdrop node in case user forgoten to add nodes above the publishing backdrop node. @@ -60,7 +59,8 @@ class ValidateBackdrop(pyblish.api.InstancePlugin): actions = [SelectCenterInNodeGraph] def process(self, instance): - connections_out = instance.data["nodeConnectionsOut"] + child_nodes = instance.data["transientData"]["childNodes"] + connections_out = instance.data["transientData"]["nodeConnectionsOut"] msg_multiple_outputs = ( "Only one outcoming connection from " @@ -78,10 +78,10 @@ def process(self, instance): self.log.debug( "Amount of nodes on instance: {}".format( - len(instance)) + len(child_nodes)) ) - if len(instance) == 1: + if child_nodes == []: raise PublishXmlValidationError( self, msg_no_nodes, diff --git a/openpype/hosts/nuke/plugins/publish/validate_gizmo.py b/openpype/hosts/nuke/plugins/publish/validate_gizmo.py index 2321bd1fd44..878d938bea3 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_gizmo.py +++ b/openpype/hosts/nuke/plugins/publish/validate_gizmo.py @@ -1,6 +1,6 @@ import pyblish from openpype.pipeline import PublishXmlValidationError -from openpype.hosts.nuke.api import maintained_selection +from openpype.hosts.nuke import api as napi import nuke @@ -26,45 +26,44 @@ def process(self, context, plugin): instances = pyblish.api.instances_by_plugin(failed, plugin) # maintain selection - with maintained_selection(): + with napi.maintained_selection(): # collect all failed nodes xpos and ypos for instance in instances: - grpn = instance[0] + grpn = instance.data["transientData"]["node"] nuke.showDag(grpn) -@pyblish.api.log class ValidateGizmo(pyblish.api.InstancePlugin): """Validate amount of output nodes in gizmo (group) node""" order = pyblish.api.ValidatorOrder optional = True families = ["gizmo"] - label = "Validate Gizmo (Group)" + label = "Validate Gizmo (group)" hosts = ["nuke"] actions = [OpenFailedGroupNode] def process(self, instance): - grpn = instance[0] + grpn = instance.data["transientData"]["node"] with grpn: connections_out = nuke.allNodes('Output') - msg_multiple_outputs = ( - "Only one outcoming connection from " - "\"{}\" is allowed").format(instance.data["name"]) - if len(connections_out) > 1: + msg_multiple_outputs = ( + "Only one outcoming connection from " + "\"{}\" is allowed").format(instance.data["name"]) + raise PublishXmlValidationError( self, msg_multiple_outputs, "multiple_outputs", {"node_name": grpn["name"].value()} ) connections_in = nuke.allNodes('Input') - msg_missing_inputs = ( - "At least one Input node has to be inside Group: " - "\"{}\"").format(instance.data["name"]) - if len(connections_in) == 0: + msg_missing_inputs = ( + "At least one Input node has to be inside Group: 
" + "\"{}\"").format(instance.data["name"]) + raise PublishXmlValidationError( self, msg_missing_inputs, "no_inputs", {"node_name": grpn["name"].value()} diff --git a/openpype/hosts/nuke/plugins/publish/validate_knobs.py b/openpype/hosts/nuke/plugins/publish/validate_knobs.py index d44f27791a5..db21cdc7c50 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_knobs.py +++ b/openpype/hosts/nuke/plugins/publish/validate_knobs.py @@ -61,17 +61,11 @@ def get_invalid_knobs(cls, context): invalid_knobs = [] for instance in context: - # Filter publisable instances. - if not instance.data["publish"]: - continue # Filter families. families = [instance.data["family"]] families += instance.data.get("families", []) - if not families: - continue - # Get all knobs to validate. knobs = {} for family in families: diff --git a/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py b/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py index 1e59880f900..70451ebc959 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py +++ b/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py @@ -1,12 +1,19 @@ import pyblish.api -from openpype.hosts.nuke.api import maintained_selection -from openpype.pipeline import PublishXmlValidationError +from openpype.hosts.nuke import api as napi from openpype.pipeline.publish import RepairAction +from openpype.pipeline import ( + PublishXmlValidationError, + OptionalPyblishPluginMixin +) + import nuke -class ValidateOutputResolution(pyblish.api.InstancePlugin): +class ValidateOutputResolution( + OptionalPyblishPluginMixin, + pyblish.api.InstancePlugin +): """Validates Output Resolution. It is making sure the resolution of write's input is the same as @@ -15,7 +22,7 @@ class ValidateOutputResolution(pyblish.api.InstancePlugin): order = pyblish.api.ValidatorOrder optional = True - families = ["render", "render.local", "render.farm"] + families = ["render"] label = "Write Resolution" hosts = ["nuke"] actions = [RepairAction] @@ -24,14 +31,22 @@ class ValidateOutputResolution(pyblish.api.InstancePlugin): resolution_msg = "Reformat is set to wrong format" def process(self, instance): + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) if invalid: raise PublishXmlValidationError(self, invalid) @classmethod def get_reformat(cls, instance): + child_nodes = ( + instance.data.get("transientData", {}).get("childNodes") + or instance + ) + reformat = None - for inode in instance: + for inode in child_nodes: if inode.Class() != "Reformat": continue reformat = inode @@ -64,21 +79,26 @@ def _check_resolution(instance, reformat): @classmethod def repair(cls, instance): + child_nodes = ( + instance.data.get("transientData", {}).get("childNodes") + or instance + ) + invalid = cls.get_invalid(instance) - grp_node = instance[0] + grp_node = instance.data["transientData"]["node"] if cls.missing_msg == invalid: # make sure we are inside of the group node with grp_node: # find input node and select it _input = None - for inode in instance: + for inode in child_nodes: if inode.Class() != "Input": continue _input = inode # add reformat node under it - with maintained_selection(): + with napi.maintained_selection(): _input['selected'].setValue(True) _rfn = nuke.createNode("Reformat", "name Reformat01") _rfn["resize"].setValue(0) diff --git a/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py b/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py index dac240ad197..c26a03f31a9 100644 --- 
a/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py +++ b/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py @@ -17,7 +17,6 @@ def process(self, context, plugin): rootNode["proxy"].setValue(False) -@pyblish.api.log class ValidateProxyMode(pyblish.api.ContextPlugin): """Validate active proxy mode""" diff --git a/openpype/hosts/nuke/plugins/publish/validate_read_legacy.py b/openpype/hosts/nuke/plugins/publish/validate_read_legacy.py deleted file mode 100644 index 2bf1ff81f87..00000000000 --- a/openpype/hosts/nuke/plugins/publish/validate_read_legacy.py +++ /dev/null @@ -1,87 +0,0 @@ -import os - -import nuke - -import toml -import pyblish.api -from bson.objectid import ObjectId - -from openpype.pipeline import ( - discover_loader_plugins, - load_container, -) - - -class RepairReadLegacyAction(pyblish.api.Action): - - label = "Repair" - icon = "wrench" - on = "failed" - - def process(self, context, plugin): - - # Get the errored instances - failed = [] - for result in context.data["results"]: - if (result["error"] is not None and result["instance"] is not None - and result["instance"] not in failed): - failed.append(result["instance"]) - - # Apply pyblish.logic to get the instances for the plug-in - instances = pyblish.api.instances_by_plugin(failed, plugin) - - for instance in instances: - - data = toml.loads(instance[0]["avalon"].value()) - data["name"] = instance[0].name() - data["xpos"] = instance[0].xpos() - data["ypos"] = instance[0].ypos() - data["extension"] = os.path.splitext( - instance[0]["file"].value() - )[1][1:] - - data["connections"] = [] - for d in instance[0].dependent(): - for i in range(d.inputs()): - if d.input(i) == instance[0]: - data["connections"].append([i, d]) - - nuke.delete(instance[0]) - - loader_name = "LoadSequence" - if data["extension"] == "mov": - loader_name = "LoadMov" - - loader_plugin = None - for Loader in discover_loader_plugins(): - if Loader.__name__ != loader_name: - continue - - loader_plugin = Loader - - load_container( - Loader=loader_plugin, - representation=ObjectId(data["representation"]) - ) - - node = nuke.toNode(data["name"]) - for connection in data["connections"]: - connection[1].setInput(connection[0], node) - - node.setXYpos(data["xpos"], data["ypos"]) - - -class ValidateReadLegacy(pyblish.api.InstancePlugin): - """Validate legacy read instance[0]s.""" - - order = pyblish.api.ValidatorOrder - optional = True - families = ["read.legacy"] - label = "Read Legacy" - hosts = ["nuke"] - actions = [RepairReadLegacyAction] - - def process(self, instance): - - msg = "Clean up legacy read node \"{}\"".format(instance) - assert False, msg diff --git a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py index 237ff423e5f..1c22c5b9d00 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py +++ b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py @@ -4,7 +4,6 @@ from openpype.pipeline import PublishXmlValidationError -@pyblish.api.log class RepairActionBase(pyblish.api.Action): on = "failed" icon = "wrench" @@ -23,6 +22,7 @@ def get_instance(context, plugin): def repair_knob(self, instances, state): for instance in instances: + node = instance.data["transientData"]["node"] files_remove = [os.path.join(instance.data["outputDir"], f) for r in instance.data.get("representations", []) for f in r.get("files", []) @@ -31,7 +31,7 @@ def repair_knob(self, instances, state): for f in files_remove: os.remove(f) 
self.log.debug("removing file: {}".format(f)) - instance[0]["render"].setValue(state) + node["render"].setValue(state) self.log.info("Rendering toggled to `{}`".format(state)) @@ -62,9 +62,10 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): actions = [RepairCollectionActionToLocal, RepairCollectionActionToFarm] def process(self, instance): + node = instance.data["transientData"]["node"] f_data = { - "node_name": instance[0]["name"].value() + "node_name": node.name() } for repre in instance.data["representations"]: diff --git a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py index f0632f80807..bd0bbf8044b 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py @@ -1,17 +1,19 @@ -from pprint import pformat +from copy import deepcopy import pyblish.api - -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline import ( + PublishXmlValidationError, + OptionalPyblishPluginMixin +) from openpype.pipeline.publish import RepairAction from openpype.hosts.nuke.api.lib import ( - get_avalon_knob_data, WorkfileSettings ) -import nuke -@pyblish.api.log -class ValidateScriptAttributes(pyblish.api.InstancePlugin): +class ValidateScriptAttributes( + OptionalPyblishPluginMixin, + pyblish.api.InstancePlugin +): """ Validates file output. """ order = pyblish.api.ValidatorOrder + 0.1 @@ -22,14 +24,12 @@ class ValidateScriptAttributes(pyblish.api.InstancePlugin): actions = [RepairAction] def process(self, instance): - root = nuke.root() - knob_data = get_avalon_knob_data(root) + if not self.is_active(instance.data): + return + + script_data = deepcopy(instance.context.data["scriptData"]) + asset = instance.data["assetEntity"] - # get asset data frame values - frame_start = asset["data"]["frameStart"] - frame_end = asset["data"]["frameEnd"] - handle_start = asset["data"]["handleStart"] - handle_end = asset["data"]["handleEnd"] # These attributes will be checked attributes = [ @@ -48,37 +48,11 @@ def process(self, instance): for attr in attributes if attr in asset["data"] } - # fix float to max 4 digints (only for evaluating) - fps_data = float("{0:.4f}".format( - asset_attributes["fps"])) # fix frame values to include handles - asset_attributes.update({ - "frameStart": frame_start - handle_start, - "frameEnd": frame_end + handle_end, - "fps": fps_data - }) - - self.log.debug(pformat( - asset_attributes - )) - - # Get format - _format = root["format"].value() - - # Get values from nukescript - script_attributes = { - "handleStart": int(knob_data["handleStart"]), - "handleEnd": int(knob_data["handleEnd"]), - "fps": float("{0:.4f}".format(root['fps'].value())), - "frameStart": int(root["first_frame"].getValue()), - "frameEnd": int(root["last_frame"].getValue()), - "resolutionWidth": _format.width(), - "resolutionHeight": _format.height(), - "pixelAspect": _format.pixelAspect() - } - self.log.debug(pformat( - script_attributes - )) + asset_attributes["fps"] = float("{0:.4f}".format( + asset_attributes["fps"])) + script_data["fps"] = float("{0:.4f}".format( + script_data["fps"])) # Compare asset's values Nukescript X Database not_matching = [] @@ -87,14 +61,14 @@ def process(self, instance): "Asset vs Script attribute \"{}\": {}, {}".format( attr, asset_attributes[attr], - script_attributes[attr] + script_data[attr] ) ) - if asset_attributes[attr] != script_attributes[attr]: + if asset_attributes[attr] 
!= script_data[attr]: not_matching.append({ "name": attr, "expected": asset_attributes[attr], - "actual": script_attributes[attr] + "actual": script_data[attr] }) # Raise error if not matching diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_deadline_tab.py b/openpype/hosts/nuke/plugins/publish/validate_write_deadline_tab.py deleted file mode 100644 index 907577a97dd..00000000000 --- a/openpype/hosts/nuke/plugins/publish/validate_write_deadline_tab.py +++ /dev/null @@ -1,53 +0,0 @@ -import pyblish.api -import openpype.hosts.nuke.lib - - -class RepairNukeWriteDeadlineTab(pyblish.api.Action): - - label = "Repair" - icon = "wrench" - on = "failed" - - def process(self, context, plugin): - - # Get the errored instances - failed = [] - for result in context.data["results"]: - if (result["error"] is not None and result["instance"] is not None - and result["instance"] not in failed): - failed.append(result["instance"]) - - # Apply pyblish.logic to get the instances for the plug-in - instances = pyblish.api.instances_by_plugin(failed, plugin) - - for instance in instances: - group_node = [x for x in instance if x.Class() == "Group"][0] - - # Remove existing knobs. - knob_names = openpype.hosts.nuke.lib.get_deadline_knob_names() - for name, knob in group_node.knobs().items(): - if name in knob_names: - group_node.removeKnob(knob) - - openpype.hosts.nuke.lib.add_deadline_tab(group_node) - - -class ValidateNukeWriteDeadlineTab(pyblish.api.InstancePlugin): - """Ensure Deadline tab is present and current.""" - - order = pyblish.api.ValidatorOrder - label = "Deadline Tab" - hosts = ["nuke"] - optional = True - families = ["render"] - actions = [RepairNukeWriteDeadlineTab] - - def process(self, instance): - group_node = [x for x in instance if x.Class() == "Group"][0] - - knob_names = openpype.hosts.nuke.lib.get_deadline_knob_names() - missing_knobs = [] - for name in knob_names: - if name not in group_node.knobs().keys(): - missing_knobs.append(name) - assert not missing_knobs, "Missing knobs: {}".format(missing_knobs) diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py b/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py deleted file mode 100644 index 699526ef572..00000000000 --- a/openpype/hosts/nuke/plugins/publish/validate_write_legacy.py +++ /dev/null @@ -1,108 +0,0 @@ -import toml - -import nuke - -import pyblish.api - -from openpype.pipeline import discover_creator_plugins -from openpype.pipeline.publish import RepairAction -from openpype.hosts.nuke.api.lib import get_avalon_knob_data - - -class ValidateWriteLegacy(pyblish.api.InstancePlugin): - """Validate legacy write nodes.""" - - order = pyblish.api.ValidatorOrder - optional = True - families = ["write"] - label = "Validate Write Legacy" - hosts = ["nuke"] - actions = [RepairAction] - - def process(self, instance): - node = instance[0] - msg = "Clean up legacy write node \"{}\"".format(instance) - - if node.Class() not in ["Group", "Write"]: - return - - # test avalon knobs - family_knobs = ["ak:family", "avalon:family"] - family_test = [k for k in node.knobs().keys() if k in family_knobs] - self.log.debug("_ family_test: {}".format(family_test)) - - # test if render in family test knob - # and only one item should be available - assert len(family_test) == 1, msg + " > More avalon attributes" - assert "render" in node[family_test[0]].value() \ - or "still" in node[family_test[0]].value(), msg + \ - " > Not correct family" - # test if `file` knob in node, this way old - # non-group-node write 
could be detected - assert "file" not in node.knobs(), msg + \ - " > file knob should not be present" - - # check if write node is having old render targeting - assert "render_farm" not in node.knobs(), msg + \ - " > old way of setting render target" - - @classmethod - def repair(cls, instance): - node = instance[0] - - if "Write" in node.Class(): - data = toml.loads(node["avalon"].value()) - else: - data = get_avalon_knob_data(node) - - # collect reusable data - data["XYpos"] = (node.xpos(), node.ypos()) - data["input"] = node.input(0) - data["publish"] = node["publish"].value() - data["render"] = node["render"].value() - data["render_farm"] = node["render_farm"].value() - data["review"] = node["review"].value() - data["use_limit"] = node["use_limit"].value() - data["first"] = node["first"].value() - data["last"] = node["last"].value() - - family = data["family"] - cls.log.debug("_ orig node family: {}".format(family)) - - # define what family of write node should be recreated - if family == "render": - Create_name = "CreateWriteRender" - elif family == "prerender": - Create_name = "CreateWritePrerender" - elif family == "still": - Create_name = "CreateWriteStill" - - # get appropriate plugin class - creator_plugin = None - for Creator in discover_creator_plugins(): - if Creator.__name__ != Create_name: - continue - - creator_plugin = Creator - - # delete the legaci write node - nuke.delete(node) - - # create write node with creator - new_node_name = data["subset"] - creator_plugin(new_node_name, data["asset"]).process() - - node = nuke.toNode(new_node_name) - node.setXYpos(*data["XYpos"]) - node.setInput(0, data["input"]) - node["publish"].setValue(data["publish"]) - node["review"].setValue(data["review"]) - node["use_limit"].setValue(data["use_limit"]) - node["first"].setValue(data["first"]) - node["last"].setValue(data["last"]) - - # recreate render targets - if data["render"]: - node["render"].setValue("Local") - if data["render_farm"]: - node["render"].setValue("On farm") diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py index 3e2881f2985..aeecea655f3 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py +++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py @@ -5,10 +5,13 @@ set_node_knobs_from_settings, color_gui_to_int ) -from openpype.pipeline import PublishXmlValidationError + +from openpype.pipeline.publish import ( + PublishXmlValidationError, + OptionalPyblishPluginMixin, +) -@pyblish.api.log class RepairNukeWriteNodeAction(pyblish.api.Action): label = "Repair" on = "failed" @@ -18,10 +21,15 @@ def process(self, context, plugin): instances = get_errored_instances_from_context(context) for instance in instances: - write_group_node = instance[0] + child_nodes = ( + instance.data.get("transientData", {}).get("childNodes") + or instance + ) + + write_group_node = instance.data["transientData"]["node"] # get write node from inside of group write_node = None - for x in instance: + for x in child_nodes: if x.Class() == "Write": write_node = x @@ -32,7 +40,10 @@ def process(self, context, plugin): self.log.info("Node attributes were fixed") -class ValidateNukeWriteNode(pyblish.api.InstancePlugin): +class ValidateNukeWriteNode( + OptionalPyblishPluginMixin, + pyblish.api.InstancePlugin +): """ Validate Write node's knobs. 
Compare knobs on write node inside the render group @@ -42,16 +53,24 @@ class ValidateNukeWriteNode(pyblish.api.InstancePlugin): order = pyblish.api.ValidatorOrder optional = True families = ["render"] - label = "Write Node" + label = "Validate write node" actions = [RepairNukeWriteNodeAction] hosts = ["nuke"] def process(self, instance): - write_group_node = instance[0] + if not self.is_active(instance.data): + return + + child_nodes = ( + instance.data.get("transientData", {}).get("childNodes") + or instance + ) + + write_group_node = instance.data["transientData"]["node"] # get write node from inside of group write_node = None - for x in instance: + for x in child_nodes: if x.Class() == "Write": write_node = x @@ -60,17 +79,31 @@ def process(self, instance): correct_data = get_write_node_template_attr(write_group_node) - if correct_data: - check_knobs = correct_data["knobs"] - else: - return - check = [] self.log.debug("__ write_node: {}".format( write_node )) + self.log.debug("__ correct_data: {}".format( + correct_data + )) + + for knob_data in correct_data["knobs"]: + knob_type = knob_data["type"] + self.log.debug("__ knob_type: {}".format( + knob_type + )) + + if ( + knob_type == "__legacy__" + ): + raise PublishXmlValidationError( + self, ( + "Please update data in settings 'project_settings" + "/nuke/imageio/nodes/requiredNodes'" + ), + key="legacy" + ) - for knob_data in check_knobs: key = knob_data["name"] value = knob_data["value"] node_value = write_node[key].value() diff --git a/openpype/hosts/nuke/startup/menu.py b/openpype/hosts/nuke/startup/menu.py index 5e29121e9be..613d5083878 100644 --- a/openpype/hosts/nuke/startup/menu.py +++ b/openpype/hosts/nuke/startup/menu.py @@ -1,64 +1,5 @@ -import nuke -import os - -from openpype.lib import Logger from openpype.pipeline import install_host -from openpype.hosts.nuke import api -from openpype.hosts.nuke.api.lib import ( - on_script_load, - check_inventory_versions, - WorkfileSettings, - dirmap_file_name_filter, - add_scripts_gizmo -) -from openpype.settings import get_project_settings - -log = Logger.get_logger(__name__) - - -install_host(api) - -# fix ffmpeg settings on script -nuke.addOnScriptLoad(on_script_load) - -# set checker for last versions on loaded containers -nuke.addOnScriptLoad(check_inventory_versions) -nuke.addOnScriptSave(check_inventory_versions) - -# # set apply all workfile settings on script load and save -nuke.addOnScriptLoad(WorkfileSettings().set_context_settings) - -nuke.addFilenameFilter(dirmap_file_name_filter) - -log.info('Automatic syncing of write file knob to script version') - - -def add_scripts_menu(): - try: - from scriptsmenu import launchfornuke - except ImportError: - log.warning( - "Skipping studio.menu install, because " - "'scriptsmenu' module seems unavailable." 
- ) - return - - # load configuration of custom menu - project_settings = get_project_settings(os.getenv("AVALON_PROJECT")) - config = project_settings["nuke"]["scriptsmenu"]["definition"] - _menu = project_settings["nuke"]["scriptsmenu"]["name"] - - if not config: - log.warning("Skipping studio menu, no definition found.") - return - - # run the launcher for Maya menu - studio_menu = launchfornuke.main(title=_menu.title()) - - # apply configuration - studio_menu.build_from_configuration(studio_menu, config) - - -add_scripts_menu() +from openpype.hosts.nuke.api import NukeHost -add_scripts_gizmo() +host = NukeHost() +install_host(host) diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index ccb5be75dc7..d1948d8d50a 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -42,7 +42,7 @@ def process(self, instance): instance.data["toBeRenderedOn"] = "deadline" families = instance.data["families"] - node = instance[0] + node = instance.data["transientData"]["node"] context = instance.context # get default deadline webservice url from deadline module diff --git a/openpype/settings/defaults/project_anatomy/imageio.json b/openpype/settings/defaults/project_anatomy/imageio.json index f0be8f95f4d..caa2a8a2060 100644 --- a/openpype/settings/defaults/project_anatomy/imageio.json +++ b/openpype/settings/defaults/project_anatomy/imageio.json @@ -156,7 +156,7 @@ }, { "plugins": [ - "CreateWriteStill" + "CreateWriteImage" ], "nukeNodeClass": "Write", "knobs": [ diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index 57a09086ca6..5aeca288ad9 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -1,7 +1,7 @@ { "general": { "menu": { - "create": "ctrl+shift+alt+c", + "create": "ctrl+alt+c", "publish": "ctrl+alt+p", "load": "ctrl+alt+l", "manage": "ctrl+alt+m", @@ -248,12 +248,15 @@ ], "create": { "CreateWriteRender": { - "fpath_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}", - "defaults": [ + "temp_rendering_path_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}", + "default_variants": [ "Main", "Mask" ], - "knobs": [], + "instance_attributes": [ + "reviewable", + "farm_rendering" + ], "prenodes": { "Reformat01": { "nodeclass": "Reformat", @@ -274,37 +277,39 @@ } }, "CreateWritePrerender": { - "fpath_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}", - "use_range_limit": true, - "defaults": [ + "temp_rendering_path_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}", + "default_variants": [ "Key01", "Bg01", "Fg01", "Branch01", "Part01" ], - "reviewable": false, - "knobs": [], + "instance_attributes": [ + "farm_rendering", + "use_range_limit" + ], "prenodes": {} }, - "CreateWriteStill": { - "fpath_template": "{work}/renders/nuke/{subset}/{subset}.{ext}", - "defaults": [ - "ImageFrame", + "CreateWriteImage": { + "temp_rendering_path_template": "{work}/renders/nuke/{subset}/{subset}.{ext}", + "default_variants": [ + "StillFrame", "MPFrame", "LayoutFrame" ], - "knobs": [], + "instance_attributes": [ + "use_range_limit" + ], "prenodes": { "FrameHold01": { "nodeclass": "FrameHold", "dependent": "", "knobs": [ { - "type": "formatable", + "type": "expression", "name": "first_frame", - "template": "{frame}", - 
"to_type": "number" + "expression": "parent.first" } ] } @@ -312,7 +317,7 @@ } }, "publish": { - "PreCollectNukeInstances": { + "CollectInstanceData": { "sync_workfile_version_on_families": [ "nukenodes", "camera", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json index 154eca254bc..b1a8cc18127 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json @@ -17,11 +17,6 @@ "key": "menu", "label": "OpenPype Menu shortcuts", "children": [ - { - "type": "text", - "key": "create", - "label": "Create..." - }, { "type": "text", "key": "publish", @@ -106,26 +101,20 @@ "children": [ { "type": "text", - "key": "fpath_template", - "label": "Path template" + "key": "temp_rendering_path_template", + "label": "Temporary rendering path template" }, { "type": "list", - "key": "defaults", - "label": "Subset name defaults", + "key": "default_variants", + "label": "Default variants", "object_type": { "type": "text" } }, { "type": "schema_template", - "name": "template_nuke_knob_inputs", - "template_data": [ - { - "label": "Node knobs", - "key": "knobs" - } - ] + "name": "template_nuke_write_attrs" }, { "key": "prenodes", @@ -169,36 +158,20 @@ "children": [ { "type": "text", - "key": "fpath_template", - "label": "Path template" - }, - { - "type": "boolean", - "key": "use_range_limit", - "label": "Use Frame range limit by default" + "key": "temp_rendering_path_template", + "label": "Temporary rendering path template" }, { "type": "list", - "key": "defaults", - "label": "Subset name defaults", + "key": "default_variants", + "label": "Default variants", "object_type": { "type": "text" } }, - { - "type": "boolean", - "key": "reviewable", - "label": "Add reviewable toggle" - }, { "type": "schema_template", - "name": "template_nuke_knob_inputs", - "template_data": [ - { - "label": "Node knobs", - "key": "knobs" - } - ] + "name": "template_nuke_write_attrs" }, { "key": "prenodes", @@ -236,32 +209,26 @@ { "type": "dict", "collapsible": true, - "key": "CreateWriteStill", - "label": "CreateWriteStill", + "key": "CreateWriteImage", + "label": "CreateWriteImage", "is_group": true, "children": [ { "type": "text", - "key": "fpath_template", - "label": "Path template" + "key": "temp_rendering_path_template", + "label": "Temporary rendering path template" }, { "type": "list", - "key": "defaults", - "label": "Subset name defaults", + "key": "default_variants", + "label": "Default variants", "object_type": { "type": "text" } }, { "type": "schema_template", - "name": "template_nuke_knob_inputs", - "template_data": [ - { - "label": "Node knobs", - "key": "knobs" - } - ] + "name": "template_nuke_write_attrs" }, { "key": "prenodes", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json index c91d3c0e3d1..5b9145e7d98 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json @@ -11,8 +11,8 @@ { "type": "dict", "collapsible": true, - "key": "PreCollectNukeInstances", - "label": "PreCollectNukeInstances", + "key": "CollectInstanceData", + "label": "CollectInstanceData", "is_group": true, "children": [ { diff --git 
a/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_knob_inputs.json b/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_knob_inputs.json index 52a14e06363..c9dee8681ab 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_knob_inputs.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_knob_inputs.json @@ -28,6 +28,22 @@ } ] }, + { + "key": "expression", + "label": "Expression", + "children": [ + { + "type": "text", + "key": "name", + "label": "Name" + }, + { + "type": "text", + "key": "expression", + "label": "Expression" + } + ] + }, { "key": "formatable", "label": "Formate from template", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_write_attrs.json b/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_write_attrs.json new file mode 100644 index 00000000000..8be48e669de --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/template_nuke_write_attrs.json @@ -0,0 +1,19 @@ +[ + { + "key": "instance_attributes", + "label": "Instance attributes", + "type": "enum", + "multiselection": true, + "enum_items": [ + { + "reviewable": "Reviewable" + }, + { + "farm_rendering": "Farm rendering" + }, + { + "use_range_limit": "Use range limit" + } + ] + } +]
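A minimal sketch of the access pattern the updated publish plugins in this patch converge on: the group node is read from `instance.data["transientData"]["node"]` rather than by indexing the pyblish instance, child nodes fall back to the instance itself, and repairs write back through `nlib.get_node_data` / `nlib.set_node_data` on `nlib.INSTANCE_DATA_KNOB` instead of recreating nodes. The plugin name, family, and label below are placeholders for illustration and are not part of the patch.

```python
import pyblish.api

import openpype.hosts.nuke.api.lib as nlib
from openpype.pipeline import PublishXmlValidationError
from openpype.pipeline.publish import RepairAction


class ExampleTransientDataValidator(pyblish.api.InstancePlugin):
    """Illustrative only; mirrors the pattern of the updated validators."""

    order = pyblish.api.ValidatorOrder
    hosts = ["nuke"]
    families = ["render"]
    label = "Example transient data access"
    actions = [RepairAction]

    def process(self, instance):
        # The instance group node is no longer instance[0].
        node = instance.data["transientData"]["node"]

        # Child nodes fall back to the instance, mirroring the write-node
        # and output-resolution validators in this patch.
        child_nodes = (
            instance.data.get("transientData", {}).get("childNodes")
            or instance
        )

        write_node = None
        for child in child_nodes:
            if child.Class() == "Write":
                write_node = child

        if write_node is None:
            raise PublishXmlValidationError(
                self, "No Write node found in '{}'".format(node.name())
            )

    @classmethod
    def repair(cls, instance):
        # Repairs edit the stored instance data on the node, as in the
        # asset-name repair action above.
        node = instance.data["transientData"]["node"]
        node_data = nlib.get_node_data(node, nlib.INSTANCE_DATA_KNOB) or {}
        node_data["asset"] = instance.context.data["assetEntity"]["name"]
        nlib.set_node_data(node, nlib.INSTANCE_DATA_KNOB, node_data)
```

The settings schema additionally accepts an `expression` knob item (see `template_nuke_knob_inputs.json` above, e.g. `first_frame` driven by `parent.first`). The helper below is only an assumption about how consuming code might apply such an item to a prenode knob; that code is outside this diff.

```python
def apply_expression_knob(node, knob_item):
    """Apply an 'expression' knob item from settings to a Nuke node.

    knob_item example (from the new schema):
        {"type": "expression", "name": "first_frame", "expression": "parent.first"}
    """
    # setExpression drives the knob by a Nuke expression instead of a value.
    node[knob_item["name"]].setExpression(knob_item["expression"])
```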