# NOTE: web-page scrape artifacts ("Skip to content", contributor banner) removed from this copy
# Add-on metadata read by Blender's add-on manager.
bl_info = {
    "name": "NodeIO",
    "author": "Jacob Morris",
    "version": (0, 6, 0),
    "blender": (2, 79, 0),
    "location": "Node Editor > Properties",
    "description": "Allows The Exporting And Importing Of Node Trees Via .bnodes Files",
    "category": "Import-Export"
}  # NOTE(review): closing brace restored — it was lost in this copy of the file
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <>.
import bpy
from bpy.props import StringProperty, EnumProperty, BoolProperty
from datetime import datetime
from time import tzname
import operator
from inspect import getmembers
from os import path, mkdir, listdir, walk, remove as remove_file, sep as os_file_sep
from shutil import copyfile, rmtree
import zipfile
from mathutils import *
import json
# File-format version written into exported .bnodes files.
VERSION = (0, 6, 0)
# Decimal places kept when rounding floats for export (referenced throughout the
# module but missing from this copy of the file).
# NOTE(review): 5 is a guess — confirm the original precision against upstream.
ROUND = 5
DEBUG_FILE = False  # makes JSON file more human readable at the cost of file-size
def make_list(data):
    """Convert an iterable (e.g. mathutils Color/Vector) to a plain JSON-safe list.

    bool/str items are kept as-is; numeric items are rounded to ``ROUND``
    decimal places to keep the exported file small.
    """
    out = []
    for i in data:
        if isinstance(i, (bool, str)):
            # NOTE(review): this append was missing in this copy of the file;
            # restored so bool/str values are not silently dropped
            out.append(i)
        else:  # int, float
            out.append(round(i, ROUND))
    return out
def collect_node_data(n: bpy.types.Node):
    """Collect a single node's socket values, attributes and external dependencies.

    Returns ``[node_data, is_group, dependencies]`` where ``node_data`` is a
    JSON-serializable dict, ``is_group`` flags group nodes whose inner tree is
    collected separately by the caller, and ``dependencies`` lists external
    data (e.g. images) the node references.

    NOTE(review): this copy of the function is truncated — several lines
    (``try:`` statements, ``append`` calls, and right-hand sides of some
    assignments) were lost; the surviving code is kept byte-identical and the
    gaps are flagged below.
    """
    ns, inputs, outputs, dependencies = [], [], [], []
    node_data = {"inputs": inputs, "outputs": outputs, "node_specific": ns, "bl_idname": n.bl_idname}
    is_group = False
    if n.bl_idname in ("ShaderNodeGroup", "TextureNodeGroup") or n.bl_idname[0:11] == "SvGroupNode":
        is_group = True
    # certain nodes that do not support some operations, like having no .inputs or .outputs,
    node_exclude_list = ['NodeReroute', 'NodeGroupInput', 'NodeGroupOutput']
    socket_field_list = ['default_value', "value", "objectName", "fontName", "category", "groupName", "textBlockName",
                         "sequenceName", 'isUsed', 'easeIn', 'easeOut']
    # types that can be converted to lists
    list_types = (Color, Vector, Euler, Quaternion, bpy.types.bpy_prop_array)
    if n.bl_idname not in node_exclude_list:  # Reroute does have in and out, but does not know type until linked
        # inputs
        for j in range(len(n.inputs)):
            socket = n.inputs[j]
            data = {"index": j, "bl_idname": socket.bl_idname, 'values': {}}
            for i in socket_field_list:
                # NOTE(review): a ``try:`` line guarding the eval appears to be missing here
                val = eval("socket.{}".format(i))
                if isinstance(val, list_types):  # list
                    data["values"][i] = make_list(val)
                elif isinstance(val, (str, bool)):
                    data["values"][i] = val
                elif isinstance(val, (float, int)):
                    data["values"][i] = round(val, ROUND)
                except AttributeError:
            # NOTE(review): body lost — presumably ``inputs.append(data)``
            if data['values']:
        # outputs
        for j in range(len(n.outputs)):
            socket = n.outputs[j]
            data = {"index": j, "bl_idname": socket.bl_idname, 'values': {}}
            for i in socket_field_list:
                # NOTE(review): same lost ``try:`` as in the inputs loop above
                val = eval("socket.{}".format(i))
                if isinstance(val, list_types):  # list
                    data["values"][i] = make_list(val)
                elif isinstance(val, (str, bool)):
                    data["values"][i] = val
                elif isinstance(val, (float, int)):
                    data["values"][i] = round(val, ROUND)
                except AttributeError:
            # NOTE(review): body lost — presumably ``outputs.append(data)``
            if data['values']:
    elif n.bl_idname == "NodeGroupInput":
        temp = []
        for i in n.outputs:
            # NOTE(review): body lost — presumably appends socket id/name to ``temp``
            if i.bl_idname != "NodeSocketVirtual":
        ns += ["group_input", temp]
    elif n.bl_idname == "NodeGroupOutput":
        temp = []
        for i in n.inputs:
            # NOTE(review): body lost — presumably appends socket id/name to ``temp``
            if i.bl_idname != "NodeSocketVirtual":
        ns += ["group_output", temp]
    # list of default values to ignore for smaller file-size, or because not needed, also if property is read-only
    exclude_attributes = {'__doc__', '__module__', '__slots__', 'bl_description', 'bl_height_default', 'bl_height_max',
                          'bl_height_min', 'bl_icon', 'bl_rna', 'bl_static_type', 'bl_width_default', 'bl_width_min',
                          'bl_width_max', 'color_mapping', 'draw_buttons', 'draw_buttons_ext', 'image_user',
                          'input_template', 'inputs', 'internal_links', 'is_registered_node_type', 'bl_label',
                          'output_template', 'outputs', 'poll', 'poll_instance', 'rna_type', 'shading_compatibility',
                          'show_options', 'show_preview', 'show_texture', 'socket_value_update', 'texture_mapping',
                          'type', 'update', 'viewLocation', 'width_hidden', 'bl_idname', 'dimensions',
                          'isAnimationNode', 'evaluationExpression', 'socketIdentifier', 'canCache',
                          'iterateThroughLists', 'identifier', 'scale_off', 'orient_axis', 'generic_enum', 'objects',
                          'particles', 'uv'}
    exclude_nodes = {'SvGetPropNode', "SvSetPropNode"}
    # Manual Attribute Collection ------------------------------------------->>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    if n.bl_idname[0:11] == "SvGroupNode":
        # NOTE(review): key and right-hand side lost in this copy
        node_data[""] =
    # Automatic Attribute Collection ---------------------------------------->>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    if n.bl_idname not in exclude_nodes:
        for method in getmembers(n):
            if method[0] not in exclude_attributes:
                t = method[1]  # type of value
                val = getattr(n, method[0])  # get value
                # special handling for certain types
                if isinstance(t, list_types):  # TUPLE
                    ns += [method[0], make_list(val)]
                elif isinstance(t, (bpy.types.CurveMapping, bpy.types.ShaderNodeRGBCurve)):  # CURVES
                    if isinstance(t, bpy.types.CurveMapping):
                        if isinstance(n, bpy.types.TextureNodeCurveTime):
                            c = n.curve
                        # NOTE(review): an ``else:`` line appears to be missing here
                        c = n.mapping
                    else:  # happens with n_InterpolationFromCurveMappingNode, has ShaderNodeRGBCurve which has curve
                        c = n.curveNode.mapping
                    curves = [make_list(c.black_level), make_list(c.white_level),
                              str(c.clip_max_x), str(c.clip_max_y), str(c.clip_min_x),
                              str(c.clip_min_y), str(c.use_clip)]
                    for curve in c.curves:
                        points = [curve.extend]
                        for point in curve.points:
                            points.append([make_list(point.location), point.handle_type])
                        # NOTE(review): a line adding ``points`` to ``curves`` appears to be missing
                    ns += ["mapping", curves]
                elif isinstance(t, bpy.types.ColorRamp):  # COLOR RAMP
                    els = []
                    for j in n.color_ramp.elements:
                        cur_el = [j.position, make_list(j.color)]
                        # NOTE(review): ``els.append(cur_el)`` appears to be missing
                    ns += ["color_ramp.color_mode", n.color_ramp.color_mode, "color_ramp.interpolation",
                           n.color_ramp.interpolation, "color_ramp.elements", els]
                elif isinstance(t, bpy.types.NodeTree):  # NODE TREE
                    # NOTE(review): key and value lost in this copy
                    ns += ["",]
                elif isinstance(t, bpy.types.Image) and n.image is not None:  # IMAGE
                    # NOTE(review): image-name value lost after the comma
                    ns += ["image",]
                    dependencies.append(['image',, n.image.filepath])
                elif isinstance(t, bpy.types.ParticleSystem):  # PARTICLE SYSTEM - needs objects and particle system
                    ns += [method[0], [n.object,]]
                elif isinstance(t, (str, bool)):  # STRING
                    ns += [method[0], val]
                elif isinstance(t, (int, float)):  # FlOAT, INTEGER
                    ns += [method[0], round(val, ROUND)]
                elif isinstance(t, bpy.types.Node):  # FRAME NODE
                    ns += [method[0],]
                elif isinstance(t, bpy.types.Texture):  # TEXTURE NODE
                    ns += [method[0],]
    # extra information needed for creating nodes
    if n.bl_idname == 'an_CreateListNode':  # have to determine number of inputs, has to be evaluated after assignedType
        ns += ['an_list_size', len(n.inputs) - 1]
    elif n.bl_idname in ('SvGetPropNode', 'SvSetPropNode'):
        ns += ['prop_name', n.prop_name, 'location', make_list(n.location), 'name',, 'color', make_list(n.color),
               'hide', n.hide]
    return [node_data, is_group, dependencies]
# recursive method that collects all nodes and if group node goes and collects its nodes
# data is added to data in [[nodes, links], [nodes, links]] group by group
def collect_nodes(nodes, links, dependencies, names, name, data):
    """Recursively collect node/link data for a tree (and nested group trees) into ``data``.

    ``names`` maps each group name to its index in ``data`` so group order can
    be reconstructed on import.

    NOTE(review): truncated copy — the ``m_n.append(...)``/``m_l.append(...)``
    calls and the group-name arguments (note the ``,,``) were lost; the
    surviving code is kept byte-identical.
    """
    m_n = []
    m_l = []
    for n in nodes:  # nodes
        out, is_group, im = collect_node_data(n)
        if is_group and n.bl_idname in ("ShaderNodeGroup", "TextureNodeGroup"):
            collect_nodes(n.node_tree.nodes, n.node_tree.links, dependencies, names,, data)
        elif is_group and n.bl_idname[0:11] == "SvGroupNode":
            collect_nodes(n.monad.nodes, n.monad.links, dependencies, names,, data)
    for l in links:  # links
        out = link_info(l)
    data.append([m_n, m_l])
    names[name] = len(data) - 1
def link_info(link):
    """Return index information describing ``link``'s two endpoints.

    Parses ``path_from_id()`` of each socket (e.g. ``nodes["X"].inputs[3]``)
    to recover the socket index.

    NOTE(review): truncated copy — the ``else:`` branches of the two length
    checks and the ``out.append(...)`` calls that build the result were lost;
    as it stands ``out`` is always returned empty. Code kept byte-identical.
    """
    out = []
    fr = link.from_socket.path_from_id()
    fr = fr.split(".")
    if len(fr) == 3:
        fr = fr[2]
    fr = fr[1]
    n1 = fr.index("[")
    n2 = fr.index("]")
    ind = int(fr[n1 + 1:n2])
    fr = link.to_socket.path_from_id()
    fr = fr.split(".")
    if len(fr) == 3:
        fr = fr[2]
    fr = fr[1]
    n1 = fr.index("[")
    n2 = fr.index("]")
    ind = int(fr[n1 + 1:n2])
    return out
def export_node_tree(self, context):
    """Export the active node tree (and its dependencies) to a .bnodes JSON file.

    ``self`` is the calling operator (used for ``); settings are
    read from scene properties (export path, dependency path mode, compress).

    NOTE(review): truncated copy — ``try:``/``else:`` lines, ``
    calls fused onto condition lines, and parts of literals were lost; the
    surviving code is kept byte-identical and gaps are flagged below.
    """
    to_export = []
    # export_type = context.scene.node_io_export_type
    export_path = bpy.path.abspath(context.scene.node_io_export_path)
    folder_path = None
    folder_name = None
    node_tree = context.space_data.node_tree
    # check data
    # NOTE(review): the three branches below lost their `` calls —
    # the trailing text was fused onto the condition lines
    if not export_path:{"ERROR"}, "NodeIO: Empty Export Path")
    elif not path.exists(export_path):{"ERROR"}, "NodeIO: Export Path '{}' Does Not Exist".format(export_path))
    elif node_tree is None:{"ERROR"}, "NodeIO: No Active Node Tree")
    # COLLECT NEED INFORMATION: to_export allows multiple node_trees at a time. Info formatted into dict
    # {"nodes":____, "links":____, "name":____, "bl_idname":_____}
    if node_tree.bl_idname in ("ShaderNodeTree", "MitsubaShaderNodeTree"):
        # NOTE(review): the "name" value (presumably `` was lost
        to_export.append({"nodes": node_tree.nodes, "links": node_tree.links, "name":, "bl_idname": node_tree.bl_idname})
    elif node_tree.bl_idname in ("an_AnimationNodeTree", "SverchCustomTreeType"):
        to_export.append({"nodes": node_tree.nodes, "links": node_tree.links, "name":,
                          "bl_idname": node_tree.bl_idname})
    elif node_tree.bl_idname == "TextureNodeTree":
        to_export.append({"nodes": node_tree.nodes, "links": node_tree.links, "name":, "bl_idname": node_tree.bl_idname})
    # create folder if more then one node_tree, or if paths are being made relative and there might be dependencies
    if len(to_export) > 1 or context.scene.node_io_dependency_save_type == "2":
        if len(to_export) > 1:
            folder_name = "mat_group_{}".format(len(to_export))
        # NOTE(review): an ``else:`` line appears to be missing here
        folder_name = to_export[0]['name']
        folder_path = export_path + os_file_sep + folder_name
        # NOTE(review): the ``try:``/``mkdir`` guarded by this except was lost
        except FileExistsError:{"INFO"}, "NodeIO: Directory '{}' Already Exists, Will Add/Overwrite Files In Directory".
        # NOTE(review): this was presumably under an ``else:`` for the outer if
        folder_path = export_path
    # export materials
    for node_tree in to_export:
        json_root = {}
        names = {}
        data, dependencies = [], []
        m_links, m_nodes = node_tree["links"], node_tree["nodes"]
        # get node data
        collect_nodes(m_nodes, m_links, dependencies, names, "main", data)
        # write data
        # material attribs
        t =
        date_string = "{}/{}/{} at {}:{}:{} in {}".format(t.month,, t.year, t.hour, t.minute,
                                                          t.second, tzname[0])
        node_counter = 0
        for group in names:
            json_root[group] = {'nodes': data[names[group]][0], 'links': data[names[group]][1]}
            node_counter += len(data[names[group]][0])
        # get order of groups
        pre_order = sorted(names.items(), key=operator.itemgetter(1))
        order = [i[0].replace("/", "_") for i in pre_order]
        # NOTE(review): the 'node_tree_id' value and closing brace were lost
        json_root['__info__'] = {'number_of_nodes': node_counter, 'group_order': order, "render_engine":
                                 context.scene.render.engine, "node_tree_name": node_tree["name"],
                                 "date_created": date_string, "version": VERSION, "node_tree_id":
        # dependencies
        depend_out = []  # collect all dependencies to place as attribute of root element so they can be imported first
        duplicates = {}
        # absolute filepaths
        if context.scene.node_io_dependency_save_type == "1":
            json_root['__info__']['path_type'] = "absolute"
            # of format [node, node,...] where each node is [depend, depend,...] and depend is [type, name, path]
            for node in dependencies:
                for depend in node:
                    if depend[1] not in duplicates or depend[1] in duplicates and depend[2] != duplicates[depend[1]]:
                        depend_out.append([depend[0], depend[1], bpy.path.abspath(depend[2])])
                        duplicates[depend[1]] = bpy.path.abspath(depend[2])
        # relative filepaths
        # NOTE(review): an ``else:`` line appears to be missing before this branch
        json_root['__info__']['path_type'] = "relative"
        for node in dependencies:
            for depend in node:
                if depend[1] not in duplicates:
                    depend_path = bpy.path.abspath(depend[2])
                    depend_out.append([depend[0], depend[1], os_file_sep + depend[1]])
                    copyfile(depend_path, folder_path + os_file_sep + depend[1])
                    duplicates[depend[1]] = depend[1]
        json_root['__info__']['dependencies'] = depend_out
        save_path = folder_path + os_file_sep + node_tree["name"] + ".bnodes"
        # write file
        # NOTE(review): a ``try:`` guarding the open/dump was lost
        file = open(save_path, 'w')
        if DEBUG_FILE:  # make multiple lines and indent if trying to debug
            json.dump(json_root, file, indent=4)
        # NOTE(review): missing ``else:`` — compact dump for normal exports
        json.dump(json_root, file)
        file.close(){"INFO"}, "NodeIO: Exported '{}' With {} Nodes And {} Dependencies".format(
            json_root['__info__']['node_tree_name'], json_root['__info__']['number_of_nodes'],
        except (PermissionError, FileNotFoundError):{"ERROR"}, "NodeIO: Permission Denied '{}', Cannot Continue".format(save_path))
    # zip folder
    if folder_path != export_path and context.scene.node_io_is_compress:  # if folder has been created
        if path.exists(folder_path + ".zip"):  # if zipped file is already there, delete
            remove_file(folder_path + ".zip")
        zf = zipfile.ZipFile(folder_path + ".zip", "w", zipfile.ZIP_DEFLATED)
        for dirname, subdirs, files in walk(folder_path):
            for filename in files:
                zf.write(path.join(dirname, filename), arcname=filename)
        # delete non-compressed folder
def import_node_tree(self, context):
    """Import one or more .bnodes files (or a .zip of them) and rebuild the node trees.

    ``self`` is the calling operator (used for ``); the import
    source is chosen by the scene's import-type/path properties.

    NOTE(review): truncated copy — ``try:``/``else:`` lines, ``
    calls fused onto condition lines, and right-hand sides of several
    assignments were lost; surviving code kept byte-identical, gaps flagged.
    """
    if context.scene.node_io_import_type == "1":  # single file
        import_path = bpy.path.abspath(context.scene.node_io_import_path_file)
        folder_path = path.dirname(import_path)
    else:  # all files in folder
        import_path = bpy.path.abspath(context.scene.node_io_import_path_dir)
        folder_path = import_path
    # check file path
    # NOTE(review): `` calls fused onto the condition lines below
    if not import_path:{"ERROR"}, "NodeIO: Empty Import Path")
    elif not path.exists(import_path):{"ERROR"}, "NodeIO: Filepath '{}' Does Not Exist".format(import_path))
    elif context.scene.node_io_import_type == "1" and not import_path.endswith(".bnodes") and \
            not import_path.endswith('.zip'):{"ERROR"}, "NodeIO: Filepath Does Not End With .bnodes")
    # collect filepaths
    import_list = []
    if context.scene.node_io_import_type == "2" and not import_path.endswith('.zip'):  # import all files in folder
        files = listdir(import_path)
        for file in files:
            if file.endswith(".bnodes"):
                import_list.append(import_path + os_file_sep + file)
    elif import_path.endswith('.zip'):
        zip_folder = zipfile.ZipFile(import_path)
        folder_path = path.dirname(import_path) + os_file_sep + path.basename(import_path).split(".")[0]
        # NOTE(review): the ``try:`` guarded by the except below was lost
        mkdir(folder_path)  # create new folder
        except FileExistsError:{"INFO"}, "NodeIO: Folder Already Exists")
        for file_name in zip_folder.namelist():  # open zip file and write it to new directory
            f = open(folder_path + os_file_sep + file_name, 'wb')
            f.write(, 'r').read())
            if file_name.endswith('.bnodes'):
                import_list.append(folder_path + os_file_sep + file_name)
    # for each .bnodes file import and create material
    for file_path in import_list:
        file = open(file_path, 'r')
        root = json.load(file)
        node_tree, nodes, links = None, None, None
        # determine type
        if root['__info__']['node_tree_id'] == 'ShaderNodeTree':
            # make sure in correct render mode
            # NOTE(review): line truncated — report call and format argument incomplete
            if root['__info__']['render_engine'] != context.scene.render.engine:{"ERROR"}, "NodeIO: Please Switch To '{}' Engine".format(root['__info__']
            node_tree =['__info__']['node_tree_name'])
            node_tree.use_nodes = True
            nodes = node_tree.node_tree.nodes
            links = node_tree.node_tree.links
        elif root['__info__']['node_tree_id'] == "MitsubaShaderNodeTree":
            # make sure in correct render mode
            # NOTE(review): same truncation as above
            if root['__info__']['render_engine'] != context.scene.render.engine:{"ERROR"}, "NodeIO: Please Switch To '{}' Engine".format(root['__info__']
            node_tree =['__info__']['node_tree_name'])
            context.space_data.node_tree = node_tree
            # NOTE(review): trailing ``type=...`` argument lost
            mitsuba_tree =['__info__']['node_tree_name'],
            nodes = mitsuba_tree.nodes
            links = mitsuba_tree.links
            # NOTE(review): right-hand side (presumably `` lost
            node_tree.mitsuba_nodes.nodetree =
        elif root['__info__']['node_tree_id'] in ("an_AnimationNodeTree", "SverchCustomTreeType"):
            # NOTE(review): trailing ``type=...`` argument lost
            node_tree =['__info__']['node_tree_name'],
            context.space_data.node_tree = node_tree
            nodes = node_tree.nodes
            links = node_tree.links
        elif root['__info__']['node_tree_id'] == "TextureNodeTree":
            node_tree =['__info__']['node_tree_name'], type='NONE')
            node_tree.use_nodes = True
            nodes = node_tree.node_tree.nodes
            links = node_tree.node_tree.links
        # remove any default nodes
        # NOTE(review): loop body (presumably ``nodes.remove(i)``) lost
        for i in nodes:
        # import dependencies
        dependencies = root['__info__']['dependencies']
        depend_errors = 0
        for depend in dependencies:
            # NOTE(review): line truncated — the membership test target was cut off
            if depend[0] == "image" and depend[1] not in
                # NOTE(review): a ``try:`` guarding the image loads was lost
                if root['__info__']['path_type'] == "relative":
                    image = + os_file_sep + depend[1])
                # NOTE(review): missing ``else:``, and two statements fused on the next line
                image =[2]) = depend[1]  # set name in-case the image was renamed
            except RuntimeError:
                depend_errors += 1
        if depend_errors:{"ERROR"}, "NodeIO: " + str(depend_errors) + " Dependency(ies) Couldn't Be Loaded")
        # add new nodes
        order = root['__info__']['group_order']
        for group_name in order:
            if group_name not in or group_name == "main":  # create only if needed
                group = root[group_name]
                # set up which node tree to use (used for node groups in node tree)
                if group_name == "main":
                    nt = nodes
                elif root['__info__']['node_tree_id'] == "ShaderNodeTree":
                    nt =, "ShaderNodeTree")
                elif root['__info__']['node_tree_id'] == "TextureNodeTree":
                    nt =, "TextureNodeTree")
                elif root['__info__']['node_tree_id'] == 'SverchCustomTreeType':
                    nt =, 'SverchGroupTreeType')
                parents = []
                for node in group['nodes']:
                    parent = {}
                    # check if node is custom then make sure it is installed
                    if node["bl_idname"] == "GenericNoteNode" and \
                            ("generic_note" not in bpy.context.user_preferences.addons.keys() and
                             "genericnote" not in bpy.context.user_preferences.addons.keys()):{"WARNING"}, "Generic Note Node Add-on Not Installed")
                    # retrieve node name, create node
                    node_id = node['bl_idname']
                    if node_id[0:11] == "SvGroupNode":  # find what the node_groups id is and use it
                        # NOTE(review): the dict key inside node[''] was lost
                        node_id =[node['']].cls_bl_idname
                    if group_name == "main":
                        # NOTE(review): right-hand sides (presumably ``\
                        # calls) lost on these two lines; an ``else:`` is missing too
                        temp =
                    temp =
                    # node specific is first so that groups are set up first
                    nos = node["node_specific"]
                    if nos:
                        for i in range(0, len(nos), 2):  # step by two because name, value, name, value...
                            att = nos[i]
                            val = nos[i + 1]
                            # group node inputs and outputs
                            if att in ("group_input", "group_output"):
                                for sub in range(0, len(val), 2):
                                    # NOTE(review): the `` calls were cut from these lines
                                    if att == "group_input" and val[sub] != "NodeSocketVirtual":[sub], val[sub + 1])
                                    elif att == "group_output" and val[sub] != "NodeSocketVirtual":[sub], val[sub + 1])
                            elif att == "parent" and val is not None:  # don't set parent in case not created yet
                                parent['parent'] = val
                            elif val is not None:
                                set_attributes(self, temp, val, att)
                    # inputs
                    if node['inputs']:
                        for i in node['inputs']:
                            for val_key in i['values'].keys():
                                # NOTE(review): both exec lines truncated (value argument and
                                # closing parens lost); an ``else:`` is missing between them
                                if isinstance(i['values'][val_key], str):
                                    exec("temp.inputs[{}].{} = '{}'".format(i['index'], val_key,
                                exec("temp.inputs[{}].{} = {}".format(i['index'], val_key,
                    # outputs
                    if node['outputs']:
                        for i in node['outputs']:
                            for val_key in i['values'].keys():
                                # NOTE(review): same truncation as the inputs block above
                                if isinstance(i['values'][val_key], str):
                                    exec("temp.outputs[{}].{} = '{}'".format(i['index'], val_key,
                                exec("temp.outputs[{}].{} = {}".format(i['index'], val_key,
                    # deal with parents
                    if parent:
                        parent['node'] =
                        parent['location'] = temp.location
                        # NOTE(review): ``parents.append(parent)`` appears to be missing
                # set parents
                for parent in parents:
                    if group_name != "main":
                        nt.nodes[parent['node']].parent = nt.nodes[parent['parent']]
                        nt.nodes[parent['node']].location = parent['location'] + nt.nodes[parent['parent']].location
                    # NOTE(review): missing ``else:`` before the two lines below
                    nt[parent['node']].parent = nt[parent['parent']]
                    nt[parent['node']].location = parent['location'] + nt[parent['parent']].location
                if group_name == "main":
                    use_nt, use_ln = nt, links
                # NOTE(review): missing ``else:`` before this line
                use_nt, use_ln = nt.nodes, nt.links
                for link in group['links']:
                    o = use_nt[link[0]].outputs[link[1]]
                    # NOTE(review): two statements fused — ``, i)`` belongs on its own line
                    i = use_nt[link[2]].inputs[link[3]], i)
    # add material to object
    if context.object is not None and context.scene.node_io_is_auto_add:
        # NOTE(review): the material-assignment body of this branch was lost
        if root['__info__']['node_tree_id'] in ('ShaderNodeTree', 'MitsubaShaderNodeTree'):
        elif root['__info__']['node_tree_id'] == "TextureNodeTree" \
                and context.active_object.active_material is not None:
            context.active_object.active_material.active_texture = node_tree{"INFO"}, "NodeIO: Imported {} With {} Nodes".format(root['__info__']['node_tree_name'],
def set_attributes(self, temp, val, att):
    """Assign one saved node attribute ``att`` with value ``val`` onto node ``temp``.

    Handles special cases (images, objects, particle systems, color ramps,
    curve mappings, textures) explicitly; everything else is set via exec().

    NOTE(review): truncated copy — a ``try:``, some ``else:`` lines, loop
    bodies, and part of one dict key were lost; code kept byte-identical.
    """
    # determine attribute type, exec() can be used if value gets directly set to attribute
    # NOTE(review): this and similar conditions below lost their "" target
    if att == "image" and val in
        temp.image =[val]
    elif att == 'an_list_size':  # add correct number of inputs for animation node list
        # NOTE(review): loop body (presumably a newInput/socket-add call) lost
        for i in range(val):
    elif att == "object" and val in
        temp.object =[val]
    elif att == "particle_system" and val[0] in \
            and val[1] in[val[0]].particle_systems:
        temp.particle_system =[val[0]].particle_systems[val[1]]
    elif att == "color_ramp.elements":
        e = temp.color_ramp.elements
        if len(val) >= 2:
            e[0].position = val[0][0]
            e[0].color = val[0][1]
            e[1].position = val[1][0]
            e[1].color = val[1][1]
        del val[0:2]
        for el in val:
            e_temp =[0])
            e_temp.color = el[1]
    # NOTE(review): the attribute name in this condition (likely "node_tree") was lost
    elif att == "" and val in
        temp.node_tree =[val]
    elif att == "material" and val in
        temp.material =[val]
    elif att == "mapping":
        if temp.bl_idname == "an_InterpolationFromCurveMappingNode":  # contains ShaderNodeRGBCurve, which has curve
            node = temp.curveNode.mapping
        elif temp.bl_idname == "TextureNodeCurveTime":
            node = temp.curve
        # NOTE(review): missing ``else:`` before this line
        node = temp.mapping
        # set curves
        node.black_level = val[0]
        node.white_level = val[1]
        node.clip_max_x = float(val[2])
        node.clip_max_y = float(val[3])
        node.clip_min_x = float(val[4])
        node.clip_min_y = float(val[5])
        node.use_clip = True if val[6] == "True" else False
        for i in range(7):
            del val[0]
        # go through each curve
        counter = 0
        for i in val:
            # set first two points
            curves = node.curves
            curves[counter].extend = i[0]
            del i[0]
            curves[counter].points[0].location = i[0][0]
            curves[counter].points[0].handle_type = i[0][1]
            curves[counter].points[1].location = i[1][0]
            curves[counter].points[1].handle_type = i[1][1]
            del i[0:2]
            for i2 in i:
                # NOTE(review): line garbled — presumably ``, y)``
                temp_point = node.curves[counter][0][0], i2[0][1])
                temp_point.handle_type = i2[1]
            counter += 1
    elif att == "texture" and val in
        temp.texture =[val]
    # NOTE(review): missing ``else:`` and ``try:`` around the generic exec fallback
    if isinstance(val, str):
        exec("temp.{} = '{}'".format(att, val))
    # NOTE(review): missing ``else:`` before this line
    exec("temp.{} = {}".format(att, val))
    except (AttributeError, TypeError) as e:{"WARNING"}, "NodeIO: Error={}, Node Name={}, Node ID={}, Attribute={}, Value={}".
                    format(type(e).__name__,, temp.bl_idname, att, val))
# Scene-level settings used by the NodeIO panel and its operators.
bpy.types.Scene.node_io_import_export = EnumProperty(name="Import/Export", items=(("1", "Import", ""),
                                                                                  ("2", "Export", "")))
bpy.types.Scene.node_io_export_path = StringProperty(name="Export Path", subtype="DIR_PATH")
bpy.types.Scene.node_io_import_path_file = StringProperty(name="Import Path", subtype="FILE_PATH")
bpy.types.Scene.node_io_import_path_dir = StringProperty(name="Import Path", subtype="DIR_PATH")
# NOTE(review): the tail of this call was truncated in this copy; the closing
# description/parens are restored here.
bpy.types.Scene.node_io_dependency_save_type = EnumProperty(name="Dependency Paths",
                                                            items=(("1", "Absolute Paths", ""),
                                                                   ("2", "Make Paths Relative", "")))
bpy.types.Scene.node_io_is_auto_add = BoolProperty(name="Add Node Tree To Object?", default=True)
bpy.types.Scene.node_io_import_type = EnumProperty(name="Import Type", items=(("1", "File", "Imports Just Selected " +
                                                                               "File Unless .zip, In Which Case All " +
                                                                               "Files Within .zip Folder Are Imported"),
                                                                              ("2", "Folder",
                                                                               "Imports All Files Within Folder")))
bpy.types.Scene.node_io_is_compress = BoolProperty(name="Compress Folder?")
class NodeIOPanel(bpy.types.Panel):
    """UI panel in the Node Editor sidebar exposing NodeIO's import/export settings."""
    bl_idname = "OBJECT_PT_node_io_panel"
    bl_label = "NodeIO Panel"
    bl_space_type = "NODE_EDITOR"
    bl_region_type = "UI"

    def draw(self, context):
        layout = self.layout
        layout.prop(context.scene, "node_io_import_export")
        # NOTE(review): the two ``else:`` branches below were lost in this copy
        # of the file; restored so the export UI and import UI are mutually
        # exclusive and the file/dir path fields don't both draw.
        if context.scene.node_io_import_export == "2":  # export settings
            layout.prop(context.scene, "node_io_dependency_save_type")
            layout.prop(context.scene, "node_io_is_compress", icon="FILTER")
            layout.prop(context.scene, "node_io_export_path")
            layout.operator("export.node_io_export", icon="ZOOMOUT")
        else:  # import settings
            layout.prop(context.scene, "node_io_import_type")
            layout.prop(context.scene, "node_io_is_auto_add", icon="NODETREE")
            if context.scene.node_io_import_type == "1":
                layout.prop(context.scene, "node_io_import_path_file")
            else:
                layout.prop(context.scene, "node_io_import_path_dir")
            layout.operator("import.node_io_import", icon="ZOOMIN")
class NodeIOExport(bpy.types.Operator):
    """Operator wrapper that hands off to export_node_tree()."""
    bl_label = "Export Node Tree"
    bl_idname = "export.node_io_export"

    def execute(self, context):
        # All the real work lives in the module-level helper.
        export_node_tree(self, context)
        return {"FINISHED"}
class NodeIOImport(bpy.types.Operator):
    """Operator wrapper that hands off to import_node_tree()."""
    bl_label = "Import Node Tree"
    bl_idname = "import.node_io_import"

    def execute(self, context):
        # All the real work lives in the module-level helper.
        import_node_tree(self, context)
        return {"FINISHED"}
# NOTE(review): the bodies of register()/unregister() and the main guard were
# lost in this copy; restored with the standard Blender 2.79 module-level
# registration calls, which register every Panel/Operator class in this file.
def register():
    """Register this add-on's classes (and keep the Scene properties declared above)."""
    bpy.utils.register_module(__name__)


def unregister():
    """Undo register()."""
    bpy.utils.unregister_module(__name__)


if __name__ == "__main__":
    register()