diff --git a/UILists/Custom_Metadata_UIList.py b/UILists/custom_metadata_ui_list.py similarity index 100% rename from UILists/Custom_Metadata_UIList.py rename to UILists/custom_metadata_ui_list.py diff --git a/UILists/Logic_UIList.py b/UILists/logic_ui_list.py similarity index 100% rename from UILists/Logic_UIList.py rename to UILists/logic_ui_list.py diff --git a/__init__.py b/__init__.py index 0e33683..7de5fdb 100644 --- a/__init__.py +++ b/__init__.py @@ -36,33 +36,33 @@ sys.path.append(os.path.dirname(os.path.realpath(__file__))) # Local file imports: from main import \ - Helpers, \ - DNA_Generator, \ - Exporter, \ - HeadlessUtil, \ - Intermediate, \ - Logic, \ - Material_Generator, \ - Metadata, \ - Refactorer + helpers, \ + dna_generator, \ + exporter, \ + headless_util, \ + intermediate, \ + logic, \ + material_generator, \ + metadata_templates, \ + refactorer from UILists import \ - Custom_Metadata_UIList, \ - Logic_UIList + custom_metadata_ui_list, \ + logic_ui_list if "bpy" in locals(): modules = { - "Helpers": Helpers, - "DNA_Generator": DNA_Generator, - "Exporter": Exporter, - "HeadlessUtil": HeadlessUtil, - "Intermediate": Intermediate, - "Logic": Logic, - "Material_Generator": Material_Generator, - "Metadata": Metadata, - "Refactorer": Refactorer, - "Custom_Metadata_UIList": Custom_Metadata_UIList, - "Logic_UIList": Logic_UIList, + "helpers": helpers, + "dna_generator": dna_generator, + "exporter": exporter, + "headless_util": headless_util, + "intermediate": intermediate, + "logic": logic, + "material_generator": material_generator, + "metadata_templates": metadata_templates, + "refactorer": refactorer, + "custom_metadata_ui_list": custom_metadata_ui_list, + "logic_ui_list": logic_ui_list, } for i in modules: @@ -86,7 +86,7 @@ def Refresh_UI(dummy1, dummy2): global combinations global recommended_limit - combinations = (Helpers.get_combinations()) + combinations = (helpers.get_combinations()) recommended_limit = int(round(combinations / 2)) # Add 
panel classes that require refresh to this refresh_panels tuple: @@ -277,7 +277,7 @@ def runAsHeadless(): f"animationEnum={settings.animationEnum}\n" f"modelBool={str(settings.modelBool)}\n" f"modelEnum={settings.modelEnum}\n" - f"batchToGenerate={str(settings.batchToGenerate)}\n" + f"batch_to_generate={str(settings.batchToGenerate)}\n" f"cardanoMetaDataBool={str(settings.cardanoMetaDataBool)}\n" f"cardano_description={settings.cardano_description}\n" f"erc721MetaData={str(settings.erc721MetaData)}\n" @@ -291,7 +291,7 @@ def runAsHeadless(): ) print(output) - args, parser = HeadlessUtil.getPythonArgs() + args, parser = headless_util.getPythonArgs() settings = bpy.context.scene.input_tool @@ -342,13 +342,13 @@ def runAsHeadless(): input.batch_json_save_path = args.batch_data_path if args.operation == 'create-dna': - Intermediate.send_To_Record_JSON(input) + intermediate.send_To_Record_JSON(input) elif args.operation == 'generate-nfts': - Intermediate.render_and_save_NFTs(input) + intermediate.render_and_save_NFTs(input) elif args.operation == 'refactor-batches': - Refactorer.reformatNFTCollection(input) + refactorer.reformatNFTCollection(input) # ======== User input Property Group ======== # @@ -357,8 +357,16 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): nftName: bpy.props.StringProperty(name="NFT Name") - collectionSize: bpy.props.IntProperty(name="NFT Collection Size", default=1, min=1) # max=(combinations - offset) - nftsPerBatch: bpy.props.IntProperty(name="NFTs Per Batch", default=1, min=1) # max=(combinations - offset) + collectionSize: bpy.props.IntProperty( + name="NFT Collection Size", + default=1, + min=1 + ) # max=(combinations - offset) + nftsPerBatch: bpy.props.IntProperty( + name="NFTs Per Batch", + default=1, + min=1 + ) # max=(combinations - offset) save_path: bpy.props.StringProperty( name="Save Path", @@ -368,10 +376,16 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): subtype="DIR_PATH" ) - enableRarity: 
bpy.props.BoolProperty(name="Enable Rarity") + enableRarity: bpy.props.BoolProperty( + name="Enable Rarity" + ) - enableLogic: bpy.props.BoolProperty(name="Enable Logic") - enable_Logic_Json: bpy.props.BoolProperty(name="Use Logic.json instead") + enableLogic: bpy.props.BoolProperty( + name="Enable Logic" + ) + enable_Logic_Json: bpy.props.BoolProperty( + name="Use Logic.json instead" + ) logicFile: bpy.props.StringProperty( name="Logic File Path", description="Path where Logic.json is located.", @@ -380,7 +394,9 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): subtype="FILE_PATH" ) - enableMaterials: bpy.props.BoolProperty(name="Enable Materials") + enableMaterials: bpy.props.BoolProperty( + name="Enable Materials" + ) materialsFile: bpy.props.StringProperty( name="Materials File", description="Path where Materials.json is located.", @@ -390,7 +406,9 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): ) # Generate NFTs Panel: - imageBool: bpy.props.BoolProperty(name="Image") + imageBool: bpy.props.BoolProperty( + name="Image" + ) imageEnum: bpy.props.EnumProperty( name="Image File Format", description="Select Image file format", @@ -400,7 +418,9 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): ] ) - animationBool: bpy.props.BoolProperty(name="Animation") + animationBool: bpy.props.BoolProperty( + name="Animation" + ) animationEnum: bpy.props.EnumProperty( name="Animation File Format", description="Select Animation file format", @@ -414,7 +434,9 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): ] ) - modelBool: bpy.props.BoolProperty(name="3D Model") + modelBool: bpy.props.BoolProperty( + name="3D Model" + ) modelEnum: bpy.props.EnumProperty( name="3D Model File Format", description="Select 3D Model file format", @@ -432,20 +454,37 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): ] ) - batchToGenerate: bpy.props.IntProperty(name="Batch To Generate", default=1, - min=1) + batchToGenerate: 
bpy.props.IntProperty( + name="Batch To Generate", + default=1, + min=1 + ) # Refactor Batches & Create Metadata Panel: - cardanoMetaDataBool: bpy.props.BoolProperty(name="Cardano Cip") - cardano_description: bpy.props.StringProperty(name="Cardano description") + cardanoMetaDataBool: bpy.props.BoolProperty( + name="Cardano Cip" + ) + cardano_description: bpy.props.StringProperty( + name="Cardano description" + ) - solanaMetaDataBool: bpy.props.BoolProperty(name="Solana Metaplex") - solana_description: bpy.props.StringProperty(name="Solana description") + solanaMetaDataBool: bpy.props.BoolProperty( + name="Solana Metaplex" + ) + solana_description: bpy.props.StringProperty( + name="Solana description" + ) - erc721MetaData: bpy.props.BoolProperty(name="ERC721") - erc721_description: bpy.props.StringProperty(name="ERC721 description") + erc721MetaData: bpy.props.BoolProperty( + name="ERC721" + ) + erc721_description: bpy.props.StringProperty( + name="ERC721 description" + ) - enableCustomFields: bpy.props.BoolProperty(name="Enable Custom Metadata Fields") + enableCustomFields: bpy.props.BoolProperty( + name="Enable Custom Metadata Fields" + ) customfieldsFile: bpy.props.StringProperty( name="Custom Fields File", description="Path where Custom_Fields.json is located.", @@ -457,27 +496,55 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): # TODO: Add 'Other' panel inputs to Headless functionality. 
# Other Panel: - enableAutoSave: bpy.props.BoolProperty(name="Auto Save Before Generation", - description="Automatically saves your Blender file when 'Generate NFTs & Create Metadata' button is clicked") + enableAutoSave: bpy.props.BoolProperty( + name="Auto Save Before Generation", + description="Automatically saves your Blender file when 'Generate NFTs & Create Metadata' button is clicked" + ) - enableAutoShutdown: bpy.props.BoolProperty(name="Auto Shutdown", - description="Automatically shuts down your computer after a Batch is finished Generating") + enableAutoShutdown: bpy.props.BoolProperty( + name="Auto Shutdown", + description="Automatically shuts down your computer after a Batch is finished Generating" + ) - specify_timeBool: bpy.props.BoolProperty(name="Shutdown in a Given Amount of Time", - description="Wait a given amount of time after a Batch is generated before Automatic Shutdown") - hours: bpy.props.IntProperty(default=0, min=0) - minutes: bpy.props.IntProperty(default=0, min=0) + specify_timeBool: bpy.props.BoolProperty( + name="Shutdown in a Given Amount of Time", + description="Wait a given amount of time after a Batch is generated before Automatic Shutdown" + ) + hours: bpy.props.IntProperty( + default=0, min=0 + ) + minutes: bpy.props.IntProperty( + default=0, min=0 + ) - emailNotificationBool: bpy.props.BoolProperty(name="Email Notifications", - description="Receive Email Notifications from Blender once a batch is finished generating") - sender_from: bpy.props.StringProperty(name="From", default="from@example.com") - email_password: bpy.props.StringProperty(name="Password", subtype='PASSWORD') - receiver_to: bpy.props.StringProperty(name="To", default="to@example.com") + emailNotificationBool: bpy.props.BoolProperty( + name="Email Notifications", + description="Receive Email Notifications from Blender once a batch is finished generating" + ) + sender_from: bpy.props.StringProperty( + name="From", + default="from@example.com" + ) + 
email_password: bpy.props.StringProperty( + name="Password", + subtype='PASSWORD' + ) + receiver_to: bpy.props.StringProperty( + name="To", + default="to@example.com" + ) - enable_debug: bpy.props.BoolProperty(name="Enable Debug Mode", description="Allows you to run Blend_My_NFTs without generating any content files and includes more console information.") + enable_debug: bpy.props.BoolProperty( + name="Enable Debug Mode", + description="Allows you to run Blend_My_NFTs without generating any content files and includes more " + "console information." + ) # API Panel properties: - apiKey: bpy.props.StringProperty(name="API Key", subtype='PASSWORD') # Test code for future features + apiKey: bpy.props.StringProperty( + name="API Key", + subtype='PASSWORD' + ) # Test code for future features # ======== Main Operators ======== # @@ -500,7 +567,7 @@ class createData(bpy.types.Operator): self.report({'ERROR'}, f"No Logic.json file path set. Please set the file path to your Logic.json file.") - Intermediate.send_To_Record_JSON(input) + intermediate.send_To_Record_JSON(input) self.report({'INFO'}, f"NFT Data created!") return {"FINISHED"} @@ -523,7 +590,7 @@ class exportNFTs(bpy.types.Operator): input = getBMNFTData() # Handling Custom Fields UIList input: - Intermediate.render_and_save_NFTs(input) + intermediate.render_and_save_NFTs(input) self.report({'INFO'}, f"All NFTs generated for batch {input.batchToGenerate}!") @@ -545,7 +612,7 @@ class resume_failed_batch(bpy.types.Operator): file_name = os.path.join(_batch_json_save_path, "Batch{}.json".format(_batchToGenerate)) batchData = json.load(open(file_name)) - _fail_state, _failed_batch, _failed_dna, _failed_dna_index = Helpers.check_failed_batches(_batch_json_save_path) + _fail_state, _failed_batch, _failed_dna, _failed_dna_index = helpers.check_failed_batches(_batch_json_save_path) render_settings = batchData["Generation Save"][-1]["Render_Settings"] @@ -553,7 +620,7 @@ class resume_failed_batch(bpy.types.Operator): 
nftName=render_settings["nftName"], save_path=_save_path, nftsPerBatch=render_settings["nftsPerBatch"], - batchToGenerate=render_settings["batchToGenerate"], + batchToGenerate=render_settings["batchToGenerate"], collectionSize=render_settings["collectionSize"], Blend_My_NFTs_Output=_Blend_My_NFTs_Output, @@ -608,7 +675,7 @@ class resume_failed_batch(bpy.types.Operator): custom_Fields=render_settings["custom_Fields"], ) - Exporter.render_and_save_NFTs(input) + exporter.render_and_save_nfts(input) self.report({'INFO'}, f"Resuming Failed Batch Generation!") @@ -628,7 +695,7 @@ class refactor_Batches(bpy.types.Operator): def execute(self, context): # Passing info to main functions for refactoring: - Refactorer.reformatNFTCollection(getBMNFTData()) + refactorer.reformatNFTCollection(getBMNFTData()) return {"FINISHED"} def invoke(self, context, event): @@ -682,7 +749,7 @@ class export_settings(bpy.types.Operator): f"modelEnum={settings.modelEnum}\n" "\n" "#Batch to generate\n" - f"batchToGenerate={str(settings.batchToGenerate)}\n" + f"batchToGenerate={str(settings.batchToGenerate)}\n" "\n" "#Metadata Format\n" f"cardanoMetaDataBool={str(settings.cardanoMetaDataBool)}\n" @@ -878,14 +945,14 @@ class BMNFTS_PT_GenerateNFTs(bpy.types.Panel): row.operator("custom_metadata_fields_uilist.clear_list", icon="X") row = layout.row() - row.prop(input_tool_scene, "batchToGenerate") + row.prop(input_tool_scene, "batchToGenerate") save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path) Blend_My_NFTs_Output = os.path.join(save_path, "Blend_My_NFTs Output", "NFT_Data") batch_json_save_path = os.path.join(Blend_My_NFTs_Output, "Batch_Data") nftBatch_save_path = os.path.join(save_path, "Blend_My_NFTs Output", "Generated NFT Batches") - fail_state, failed_batch, failed_dna, failed_dna_index = Helpers.check_failed_batches(batch_json_save_path) + fail_state, failed_batch, failed_dna, failed_dna_index = helpers.check_failed_batches(batch_json_save_path) if fail_state: row = 
layout.row() @@ -1037,7 +1104,7 @@ classes = ( BMNFTS_PT_GenerateNFTs, BMNFTS_PT_Refactor, BMNFTS_PT_Other, - ) + Custom_Metadata_UIList.classes_Custom_Metadata_UIList + Logic_UIList.classes_Logic_UIList + ) + custom_metadata_ui_list.classes_Custom_Metadata_UIList + logic_ui_list.classes_Logic_UIList def register(): @@ -1047,10 +1114,10 @@ def register(): bpy.types.Scene.input_tool = bpy.props.PointerProperty(type=BMNFTS_PGT_Input_Properties) bpy.types.Scene.custom_metadata_fields = CollectionProperty( - type=Custom_Metadata_UIList.CUSTOM_custom_metadata_fields_objectCollection) + type=custom_metadata_ui_list.CUSTOM_custom_metadata_fields_objectCollection) bpy.types.Scene.custom_metadata_fields_index = IntProperty() - bpy.types.Scene.logic_fields = CollectionProperty(type=Logic_UIList.CUSTOM_logic_objectCollection) + bpy.types.Scene.logic_fields = CollectionProperty(type=logic_ui_list.CUSTOM_logic_objectCollection) bpy.types.Scene.logic_fields_index = IntProperty() diff --git a/main/Exporter.py b/main/Exporter.py deleted file mode 100644 index 283d70c..0000000 --- a/main/Exporter.py +++ /dev/null @@ -1,586 +0,0 @@ -# Purpose: -# This file takes a given Batch created by DNA_Generator.py and tells blender to render the image or export a 3D model -# to the NFT_Output folder. 
- -import bpy -import os -import ssl -import time -import json -import smtplib -import datetime -import platform - -from .Helpers import TextColors, Loader -from .Metadata import createCardanoMetadata, createSolanaMetaData, createErc721MetaData - - -# Save info -def save_batch(batch, file_name): - saved_batch = json.dumps(batch, indent=1, ensure_ascii=True) - - with open(os.path.join(file_name), 'w') as outfile: - outfile.write(saved_batch + '\n') - - -def save_generation_state(input): - """Saves date and time of generation start, and generation types; Images, Animations, 3D Models, and the file types for each.""" - file_name = os.path.join(input.batch_json_save_path, "Batch{}.json".format(input.batchToGenerate)) - batch = json.load(open(file_name)) - - CURRENT_TIME = datetime.datetime.now().strftime("%H:%M:%S") - CURRENT_DATE = datetime.datetime.now().strftime("%d/%m/%Y") - LOCAL_TIMEZONE = str(datetime.datetime.now(datetime.timezone.utc)) - - if "Generation Save" in batch: - batch_save_number = int(batch[f"Generation Save"].index(batch[f"Generation Save"][-1])) - else: - batch_save_number = 0 - - batch["Generation Save"] = list() - batch["Generation Save"].append({ - "Batch Save Number": batch_save_number + 1, - "DNA Generated": None, - "Generation Start Date and Time": [CURRENT_TIME, CURRENT_DATE, LOCAL_TIMEZONE], - "Render_Settings": { - "nftName": input.nftName, - "save_path": input.save_path, - "nftsPerBatch": input.nftsPerBatch, - "batchToGenerate": input.batchToGenerate, - "collectionSize": input.collectionSize, - - "Blend_My_NFTs_Output": input.Blend_My_NFTs_Output, - "batch_json_save_path": input.batch_json_save_path, - "nftBatch_save_path": input.nftBatch_save_path, - - "enableImages": input.enableImages, - "imageFileFormat": input.imageFileFormat, - - "enableAnimations": input.enableAnimations, - "animationFileFormat": input.animationFileFormat, - - "enableModelsBlender": input.enableModelsBlender, - "modelFileFormat": input.modelFileFormat, - - 
"enableCustomFields": input.enableCustomFields, - - "cardanoMetaDataBool": input.cardanoMetaDataBool, - "solanaMetaDataBool": input.solanaMetaDataBool, - "erc721MetaData": input.erc721MetaData, - - "cardano_description": input.cardano_description, - "solana_description": input.solana_description, - "erc721_description": input.erc721_description, - - "enableMaterials": input.enableMaterials, - "materialsFile": input.materialsFile, - - "enableLogic": input.enableLogic, - "enable_Logic_Json": input.enable_Logic_Json, - "logicFile": input.logicFile, - - "enableRarity": input.enableRarity, - - "enableAutoShutdown": input.enableAutoShutdown, - - "specify_timeBool": input.specify_timeBool, - "hours": input.hours, - "minutes": input.minutes, - - "emailNotificationBool": input.emailNotificationBool, - "sender_from": input.sender_from, - "email_password": input.email_password, - "receiver_to": input.receiver_to, - - "custom_Fields": input.custom_Fields, - }, - }) - - save_batch(batch, file_name) - - -def save_completed(full_single_dna, a, x, batch_json_save_path, batchToGenerate): - """Saves progress of rendering to batch.json file.""" - - file_name = os.path.join(batch_json_save_path, "Batch{}.json".format(batchToGenerate)) - batch = json.load(open(file_name)) - index = batch["BatchDNAList"].index(a) - batch["BatchDNAList"][index][full_single_dna]["Complete"] = True - batch["Generation Save"][-1]["DNA Generated"] = x - - save_batch(batch, file_name) - - -# Exporter functions: -def getBatchData(batchToGenerate, batch_json_save_path): - """ - Retrieves a given batches data determined by renderBatch in config.py - """ - - file_name = os.path.join(batch_json_save_path, "Batch{}.json".format(batchToGenerate)) - batch = json.load(open(file_name)) - - NFTs_in_Batch = batch["NFTs_in_Batch"] - hierarchy = batch["hierarchy"] - BatchDNAList = batch["BatchDNAList"] - - return NFTs_in_Batch, hierarchy, BatchDNAList - - -def render_and_save_NFTs(input): - """ - Renders the NFT DNA in a 
Batch#.json, where # is renderBatch in config.py. Turns off the viewport camera and - the render camera for all items in hierarchy. - """ - - time_start_1 = time.time() - - # If failed Batch is detected and user is resuming its generation: - if input.fail_state: - print(f"{TextColors.ERROR}\nResuming Batch #{input.failed_batch}\n{TextColors.RESET}") - NFTs_in_Batch, hierarchy, BatchDNAList = getBatchData(input.failed_batch, input.batch_json_save_path) - for a in range(input.failed_dna): - del BatchDNAList[0] - x = input.failed_dna + 1 - - # If user is generating the normal way: - else: - print(f"\nGenerating Batch #{input.batchToGenerate}\n") - NFTs_in_Batch, hierarchy, BatchDNAList = getBatchData(input.batchToGenerate, input.batch_json_save_path) - save_generation_state(input) - x = 1 - - if input.enableMaterials: - materialsFile = json.load(open(input.materialsFile)) - - for a in BatchDNAList: - full_single_dna = list(a.keys())[0] - Order_Num = a[full_single_dna]['Order_Num'] - - # Material handling: - if input.enableMaterials: - single_dna, material_dna = full_single_dna.split(':') - - if not input.enableMaterials: - single_dna = full_single_dna - - def match_DNA_to_Variant(single_dna): - """ - Matches each DNA number separated by "-" to its attribute, then its variant. - """ - - listAttributes = list(hierarchy.keys()) - listDnaDecunstructed = single_dna.split('-') - dnaDictionary = {} - - for i, j in zip(listAttributes, listDnaDecunstructed): - dnaDictionary[i] = j - - for x in dnaDictionary: - for k in hierarchy[x]: - kNum = hierarchy[x][k]["number"] - if kNum == dnaDictionary[x]: - dnaDictionary.update({x: k}) - return dnaDictionary - - def match_materialDNA_to_Material(single_dna, material_dna, materialsFile): - """ - Matches the Material DNA to it's selected Materials unless a 0 is present meaning no material for that variant was selected. 
- """ - listAttributes = list(hierarchy.keys()) - listDnaDecunstructed = single_dna.split('-') - listMaterialDNADeconstructed = material_dna.split('-') - - full_dna_dict = {} - - for attribute, variant, material in zip(listAttributes, listDnaDecunstructed, listMaterialDNADeconstructed): - - for var in hierarchy[attribute]: - if hierarchy[attribute][var]['number'] == variant: - variant = var - - if material != '0': # If material is not empty - for variant_m in materialsFile: - if variant == variant_m: - # Getting Materials name from Materials index in the Materials List - materials_list = list(materialsFile[variant_m]["Material List"].keys()) - - material = materials_list[int(material) - 1] # Subtract 1 because '0' means empty mat - break - - full_dna_dict[variant] = material - - return full_dna_dict - - metadataMaterialDict = {} - - if input.enableMaterials: - materialdnaDictionary = match_materialDNA_to_Material(single_dna, material_dna, materialsFile) - - for var_mat in list(materialdnaDictionary.keys()): - if materialdnaDictionary[var_mat] != '0': - if not materialsFile[var_mat]['Variant Objects']: - """ - If objects to apply material to not specified, apply to all objects in Variant collection. - """ - metadataMaterialDict[var_mat] = materialdnaDictionary[var_mat] - - for obj in bpy.data.collections[var_mat].all_objects: - selected_object = bpy.data.objects.get(obj.name) - selected_object.active_material = bpy.data.materials[materialdnaDictionary[var_mat]] - - if materialsFile[var_mat]['Variant Objects']: - """ - If objects to apply material to are specified, apply material only to objects specified withing the Variant collection. 
- """ - metadataMaterialDict[var_mat] = materialdnaDictionary[var_mat] - - for obj in materialsFile[var_mat]['Variant Objects']: - selected_object = bpy.data.objects.get(obj) - selected_object.active_material = bpy.data.materials[materialdnaDictionary[var_mat]] - - # Turn off render camera and viewport camera for all collections in hierarchy - for i in hierarchy: - for j in hierarchy[i]: - try: - bpy.data.collections[j].hide_render = True - bpy.data.collections[j].hide_viewport = True - except KeyError: - raise TypeError( - f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" - f"The Collection '{j}' appears to be missing or has been renamed. If you made any changes to " - f"your .blned file scene, ensure you re-create your NFT Data so Blend_My_NFTs can read your scene." - f"For more information see:{TextColors.RESET}" - f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" - ) - - dnaDictionary = match_DNA_to_Variant(single_dna) - name = input.nftName + "_" + str(Order_Num) - - # Change Text Object in Scene to match DNA string: - # Variables that can be used: full_single_dna, name, Order_Num - # ob = bpy.data.objects['Text'] # Object name - # ob.data.body = str(f"DNA: {full_single_dna}") # Set text of Text Object ob - - print(f"\n{TextColors.OK}======== Generating NFT {x}/{NFTs_in_Batch}: {name} ========{TextColors.RESET}") - print(f"\nVariants selected:") - print(f"{dnaDictionary}") - if input.enableMaterials: - print(f"\nMaterials selected:") - print(f"{materialdnaDictionary}") - - print(f"\nDNA Code:{full_single_dna}") - - for c in dnaDictionary: - collection = dnaDictionary[c] - if collection != '0': - bpy.data.collections[collection].hide_render = False - bpy.data.collections[collection].hide_viewport = False - - time_start_2 = time.time() - - # Main paths for batch sub-folders: - batchFolder = os.path.join(input.nftBatch_save_path, "Batch" + str(input.batchToGenerate)) - - imageFolder = os.path.join(batchFolder, "Images") - 
animationFolder = os.path.join(batchFolder, "Animations") - modelFolder = os.path.join(batchFolder, "Models") - BMNFT_metaData_Folder = os.path.join(batchFolder, "BMNFT_metadata") - - imagePath = os.path.join(imageFolder, name) - animationPath = os.path.join(animationFolder, name) - modelPath = os.path.join(modelFolder, name) - - cardanoMetadataPath = os.path.join(batchFolder, "Cardano_metadata") - solanaMetadataPath = os.path.join(batchFolder, "Solana_metadata") - erc721MetadataPath = os.path.join(batchFolder, "Erc721_metadata") - - - def check_failed_exists(file_path): - # Delete a file if a fail state is detected and if the file being re-generated already exists. Prevents - # animations from corrupting. - - if input.fail_state: - if os.path.exists(file_path): - os.remove(file_path) - - # Generation/Rendering: - if input.enableImages: - - print(f"{TextColors.OK}---Image---{TextColors.RESET}") - - image_render_time_start = time.time() - - check_failed_exists(imagePath) - - def render_image(): - if not os.path.exists(imageFolder): - os.makedirs(imageFolder) - - bpy.context.scene.render.filepath = imagePath - bpy.context.scene.render.image_settings.file_format = input.imageFileFormat - bpy.ops.render.render(write_still=True) - - # Loading Animation: - loading = Loader(f'Rendering Image {x}/{NFTs_in_Batch}...', '').start() - render_image() - loading.stop() - - image_render_time_end = time.time() - - print( - f"{TextColors.OK}Rendered image in {image_render_time_end - image_render_time_start}s.\n{TextColors.RESET}" - ) - - if input.enableAnimations: - print(f"{TextColors.OK}---Animation---{TextColors.RESET}") - - animation_render_time_start = time.time() - - check_failed_exists(animationPath) - - def render_animation(): - if not os.path.exists(animationFolder): - os.makedirs(animationFolder) - - if input.animationFileFormat == "MP4": - bpy.context.scene.render.filepath = animationPath - bpy.context.scene.render.image_settings.file_format = "FFMPEG" - - 
bpy.context.scene.render.ffmpeg.format = 'MPEG4' - bpy.context.scene.render.ffmpeg.codec = 'H264' - bpy.ops.render.render(animation=True) - - elif input.animationFileFormat == 'PNG': - if not os.path.exists(animationPath): - os.makedirs(animationPath) - - bpy.context.scene.render.filepath = os.path.join(animationPath, name) - bpy.context.scene.render.image_settings.file_format = input.animationFileFormat - bpy.ops.render.render(animation=True) - - elif input.animationFileFormat == 'TIFF': - if not os.path.exists(animationPath): - os.makedirs(animationPath) - - bpy.context.scene.render.filepath = os.path.join(animationPath, name) - bpy.context.scene.render.image_settings.file_format = input.animationFileFormat - bpy.ops.render.render(animation=True) - - else: - bpy.context.scene.render.filepath = animationPath - bpy.context.scene.render.image_settings.file_format = input.animationFileFormat - bpy.ops.render.render(animation=True) - - # Loading Animation: - loading = Loader(f'Rendering Animation {x}/{NFTs_in_Batch}...', '').start() - render_animation() - loading.stop() - - animation_render_time_end = time.time() - - print( - f"{TextColors.OK}Rendered animation in {animation_render_time_end - animation_render_time_start}s.\n{TextColors.RESET}" - ) - - if input.enableModelsBlender: - print(f"{TextColors.OK}---3D Model---{TextColors.RESET}") - - model_generation_time_start = time.time() - - def generate_models(): - if not os.path.exists(modelFolder): - os.makedirs(modelFolder) - - for i in dnaDictionary: - coll = dnaDictionary[i] - if coll != '0': - for obj in bpy.data.collections[coll].all_objects: - obj.select_set(True) - - for obj in bpy.data.collections['Script_Ignore'].all_objects: - obj.select_set(True) - - # Remove objects from 3D model export: - # remove_objects: list = [ - # ] - # - # for obj in bpy.data.objects: - # if obj.name in remove_objects: - # obj.select_set(False) - - if input.modelFileFormat == 'GLB': - check_failed_exists(f"{modelPath}.glb") - 
bpy.ops.export_scene.gltf(filepath=f"{modelPath}.glb", - check_existing=True, - export_format='GLB', - export_keep_originals=True, - use_selection=True) - if input.modelFileFormat == 'GLTF_SEPARATE': - check_failed_exists(f"{modelPath}.gltf") - check_failed_exists(f"{modelPath}.bin") - bpy.ops.export_scene.gltf(filepath=f"{modelPath}", - check_existing=True, - export_format='GLTF_SEPARATE', - export_keep_originals=True, - use_selection=True) - if input.modelFileFormat == 'GLTF_EMBEDDED': - check_failed_exists(f"{modelPath}.gltf") - bpy.ops.export_scene.gltf(filepath=f"{modelPath}.gltf", - check_existing=True, - export_format='GLTF_EMBEDDED', - export_keep_originals=True, - use_selection=True) - elif input.modelFileFormat == 'FBX': - check_failed_exists(f"{modelPath}.fbx") - bpy.ops.export_scene.fbx(filepath=f"{modelPath}.fbx", - check_existing=True, - use_selection=True) - elif input.modelFileFormat == 'OBJ': - check_failed_exists(f"{modelPath}.obj") - bpy.ops.export_scene.obj(filepath=f"{modelPath}.obj", - check_existing=True, - use_selection=True, ) - elif input.modelFileFormat == 'X3D': - check_failed_exists(f"{modelPath}.x3d") - bpy.ops.export_scene.x3d(filepath=f"{modelPath}.x3d", - check_existing=True, - use_selection=True) - elif input.modelFileFormat == 'STL': - check_failed_exists(f"{modelPath}.stl") - bpy.ops.export_mesh.stl(filepath=f"{modelPath}.stl", - check_existing=True, - use_selection=True) - elif input.modelFileFormat == 'VOX': - check_failed_exists(f"{modelPath}.vox") - bpy.ops.export_vox.some_data(filepath=f"{modelPath}.vox") - - # Loading Animation: - loading = Loader(f'Generating 3D model {x}/{NFTs_in_Batch}...', '').start() - generate_models() - loading.stop() - - model_generation_time_end = time.time() - - print( - f"{TextColors.OK}Generated 3D model in {model_generation_time_end - model_generation_time_start}s.\n{TextColors.RESET}" - ) - - # Generating Metadata: - if input.cardanoMetaDataBool: - if not os.path.exists(cardanoMetadataPath): - 
os.makedirs(cardanoMetadataPath) - createCardanoMetadata(name, Order_Num, full_single_dna, dnaDictionary, metadataMaterialDict, - input.custom_Fields, - input.enableCustomFields, input.cardano_description, cardanoMetadataPath) - - if input.solanaMetaDataBool: - if not os.path.exists(solanaMetadataPath): - os.makedirs(solanaMetadataPath) - createSolanaMetaData(name, Order_Num, full_single_dna, dnaDictionary, metadataMaterialDict, - input.custom_Fields, - input.enableCustomFields, input.solana_description, solanaMetadataPath) - - if input.erc721MetaData: - if not os.path.exists(erc721MetadataPath): - os.makedirs(erc721MetadataPath) - createErc721MetaData(name, Order_Num, full_single_dna, dnaDictionary, metadataMaterialDict, - input.custom_Fields, - input.enableCustomFields, input.erc721_description, erc721MetadataPath) - - if not os.path.exists(BMNFT_metaData_Folder): - os.makedirs(BMNFT_metaData_Folder) - - for b in dnaDictionary: - if dnaDictionary[b] == "0": - dnaDictionary[b] = "Empty" - - metaDataDict = {"name": name, "NFT_DNA": a, "NFT_Variants": dnaDictionary, - "Material_Attributes": metadataMaterialDict} - - jsonMetaData = json.dumps(metaDataDict, indent=1, ensure_ascii=True) - - with open(os.path.join(BMNFT_metaData_Folder, "Data_" + name + ".json"), 'w') as outfile: - outfile.write(jsonMetaData + '\n') - - print(f"Completed {name} render in {time.time() - time_start_2}s") - - save_completed(full_single_dna, a, x, input.batch_json_save_path, input.batchToGenerate) - - x += 1 - - for i in hierarchy: - for j in hierarchy[i]: - bpy.data.collections[j].hide_render = False - bpy.data.collections[j].hide_viewport = False - - batch_complete_time = time.time() - time_start_1 - - print(f"\nAll NFTs successfully generated and sent to {input.nftBatch_save_path}" - f"\nCompleted all renders in Batch{input.batchToGenerate}.json in {batch_complete_time}s\n") - - batch_info = {"Batch Render Time": batch_complete_time, "Number of NFTs generated in Batch": x - 1, - "Average 
time per generation": batch_complete_time / x - 1} - - batch_infoFolder = os.path.join(input.nftBatch_save_path, "Batch" + str(input.batchToGenerate), "batch_info.json") - save_batch(batch_info, batch_infoFolder) - - # Send Email that Batch is complete: - if input.emailNotificationBool: - port = 465 # For SSL - smtp_server = "smtp.gmail.com" - sender_email = input.sender_from # Enter your address - receiver_email = input.receiver_to # Enter receiver address - password = input.email_password - - # Get batch info for message: - if input.fail_state: - batch = input.fail_state - batchData = getBatchData(input.failed_batch, input.batch_json_save_path) - - else: - batchData = getBatchData(input.batchToGenerate, input.batch_json_save_path) - - batch = input.batchToGenerate - - generation_time = str(datetime.timedelta(seconds=batch_complete_time)) - - message = f"""\ - Subject: Batch {batch} completed {x - 1} NFTs in {generation_time} (h:m:s) - - Generation Time: - {generation_time.split(':')[0]} Hours, {generation_time.split(':')[1]} Minutes, {generation_time.split(':')[2]} Seconds - Batch Data: - - {batchData} - - This message was sent from an instance of the Blend_My_NFTs Blender add-on. 
- """ - - context = ssl.create_default_context() - with smtplib.SMTP_SSL(smtp_server, port, context=context) as server: - server.login(sender_email, password) - server.sendmail(sender_email, receiver_email, message) - - # Automatic Shutdown: - # If user selects automatic shutdown but did not specify time after Batch completion - def shutdown(time): - plateform = platform.system() - - if plateform == "Windows": - os.system(f"shutdown /s /t {time}") - if plateform == "Darwin": - os.system(f"shutdown /s /t {time}") - - if input.enableAutoShutdown and not input.specify_timeBool: - shutdown(0) - - # If user selects automatic shutdown and specify time after Batch completion - if input.enableAutoShutdown and input.specify_timeBool: - hours = (int(input.hours) / 60) / 60 - minutes = int(input.minutes) / 60 - total_sleep_time = hours + minutes - - # time.sleep(total_sleep_time) - - shutdown(total_sleep_time) diff --git a/main/DNA_Generator.py b/main/dna_generator.py similarity index 92% rename from main/DNA_Generator.py rename to main/dna_generator.py index 6d62d87..289d56b 100644 --- a/main/DNA_Generator.py +++ b/main/dna_generator.py @@ -7,8 +7,8 @@ import json import random import traceback from functools import partial -from . import Logic, Material_Generator, Helpers -from .Helpers import TextColors +from . import logic, material_generator, helpers +from .helpers import TextColors def generate_nft_dna( @@ -24,7 +24,7 @@ def generate_nft_dna( Returns batchDataDictionary containing the number of NFT combinations, hierarchy, and the dna_list. 
""" - hierarchy = Helpers.get_hierarchy() + hierarchy = helpers.get_hierarchy() # DNA random, Rarity and Logic methods: data_dictionary = {} @@ -110,11 +110,11 @@ def generate_nft_dna( # print(f"Rarity DNA: {single_dna}") if enable_logic: - single_dna = Logic.logicafyDNAsingle(hierarchy, single_dna, logic_file, enable_rarity, enable_materials) + single_dna = logic.logicafyDNAsingle(hierarchy, single_dna, logic_file, enable_rarity, enable_materials) # print(f"Logic DNA: {single_dna}") if enable_materials: - single_dna = Material_Generator.apply_materials(hierarchy, single_dna, materials_file, enable_rarity) + single_dna = material_generator.apply_materials(hierarchy, single_dna, materials_file, enable_rarity) # print(f"Materials DNA: {single_dna}") # print("============\n") @@ -153,7 +153,7 @@ def generate_nft_dna( # Messages: - Helpers.raise_warning_collection_size(dna_list, collection_size) + helpers.raise_warning_collection_size(dna_list, collection_size) # Data stored in batchDataDictionary: data_dictionary["num_nfts_generated"] = len(dna_list) @@ -244,7 +244,7 @@ def send_to_record( """ # Checking Scene is compatible with BMNFTs: - Helpers.check_scene() + helpers.check_scene() # Messages: print( @@ -285,12 +285,12 @@ def send_to_record( nft_record_save_path = os.path.join(blend_my_nfts_output, "NFTRecord.json") # Checks: - Helpers.raise_warning_max_nfts(nfts_per_batch, collection_size) - Helpers.check_duplicates(data_dictionary["dna_list"]) - Helpers.raise_error_zero_combinations() + helpers.raise_warning_max_nfts(nfts_per_batch, collection_size) + helpers.check_duplicates(data_dictionary["dna_list"]) + helpers.raise_error_zero_combinations() if enable_rarity: - Helpers.check_rarity(data_dictionary["hierarchy"], data_dictionary["dna_list"], + helpers.check_rarity(data_dictionary["hierarchy"], data_dictionary["dna_list"], os.path.join(save_path, "Blend_My_NFTs Output/NFT_Data")) except FileNotFoundError: @@ -316,15 +316,15 @@ def send_to_record( except 
Exception: traceback.print_exc() raise ( - f"\n{Helpers.TextColors.ERROR}Blend_My_NFTs Error:\n" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows " f"the naming conventions and scene structure. For more information, " - f"see:\n{Helpers.TextColors.RESET}" + f"see:\n{TextColors.RESET}" f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) # Loading Animation: - loading = Helpers.Loader(f'Creating NFT DNA...', '').start() + loading = helpers.Loader(f'Creating NFT DNA...', '').start() create_nft_data() make_batches(collection_size, nfts_per_batch, save_path, batch_json_save_path) loading.stop() @@ -332,5 +332,5 @@ def send_to_record( time_end = time.time() print( - f"{Helpers.TextColors.OK}Created and saved NFT DNA in {time_end - time_start}s.\n{Helpers.TextColors.RESET}" + f"{TextColors.OK}Created and saved NFT DNA in {time_end - time_start}s.\n{TextColors.RESET}" ) diff --git a/main/exporter.py b/main/exporter.py new file mode 100644 index 0000000..0c640e0 --- /dev/null +++ b/main/exporter.py @@ -0,0 +1,641 @@ +# Purpose: +# This file takes a given Batch created by dna_generator.py and tells blender to render the image or export a 3D model +# to the NFT_Output folder. + +import bpy +import os +import ssl +import time +import json +import smtplib +import datetime +import platform + +from .helpers import TextColors, Loader +from .metadata_templates import createCardanoMetadata, createSolanaMetaData, createErc721MetaData + + +# Save info +def save_batch(batch, file_name): + saved_batch = json.dumps(batch, indent=1, ensure_ascii=True) + + with open(os.path.join(file_name), 'w') as outfile: + outfile.write(saved_batch + '\n') + + +def save_generation_state(input): + """ + Saves date and time of generation start, and generation types; Images, Animations, 3D Models, and the file types for + each. 
+ """ + file_name = os.path.join(input.batch_json_save_path, "Batch{}.json".format(input.batchToGenerate)) + batch = json.load(open(file_name)) + + current_time = datetime.datetime.now().strftime("%H:%M:%S") + current_date = datetime.datetime.now().strftime("%d/%m/%Y") + local_timezone = str(datetime.datetime.now(datetime.timezone.utc)) + + if "Generation Save" in batch: + batch_save_number = int(batch[f"Generation Save"].index(batch[f"Generation Save"][-1])) + else: + batch_save_number = 0 + + batch["Generation Save"] = list() + batch["Generation Save"].append({ + "Batch Save Number": batch_save_number + 1, + "DNA Generated": None, + "Generation Start Date and Time": [current_time, current_date, local_timezone], + "Render_Settings": { + "nftName": input.nftName, + "save_path": input.save_path, + "nftsPerBatch": input.nftsPerBatch, + "batch_to_generate": input.batchToGenerate, + "collectionSize": input.collectionSize, + + "Blend_My_NFTs_Output": input.Blend_My_NFTs_Output, + "batch_json_save_path": input.batch_json_save_path, + "nftBatch_save_path": input.nftBatch_save_path, + + "enableImages": input.enableImages, + "imageFileFormat": input.imageFileFormat, + + "enableAnimations": input.enableAnimations, + "animationFileFormat": input.animationFileFormat, + + "enableModelsBlender": input.enableModelsBlender, + "modelFileFormat": input.modelFileFormat, + + "enableCustomFields": input.enableCustomFields, + + "cardanoMetaDataBool": input.cardanoMetaDataBool, + "solanaMetaDataBool": input.solanaMetaDataBool, + "erc721MetaData": input.erc721MetaData, + + "cardano_description": input.cardano_description, + "solana_description": input.solana_description, + "erc721_description": input.erc721_description, + + "enableMaterials": input.enableMaterials, + "materialsFile": input.materialsFile, + + "enableLogic": input.enableLogic, + "enable_Logic_Json": input.enable_Logic_Json, + "logicFile": input.logicFile, + + "enableRarity": input.enableRarity, + + "enableAutoShutdown": 
input.enableAutoShutdown, + + "specify_timeBool": input.specify_timeBool, + "hours": input.hours, + "minutes": input.minutes, + + "emailNotificationBool": input.emailNotificationBool, + "sender_from": input.sender_from, + "email_password": input.email_password, + "receiver_to": input.receiver_to, + + "custom_Fields": input.custom_Fields, + }, + }) + + save_batch(batch, file_name) + + +def save_completed(full_single_dna, a, x, batch_json_save_path, batch_to_generate): + """Saves progress of rendering to batch.json file.""" + + file_name = os.path.join(batch_json_save_path, "Batch{}.json".format(batch_to_generate)) + batch = json.load(open(file_name)) + index = batch["batch_dna_list"].index(a) + batch["batch_dna_list"][index][full_single_dna]["Complete"] = True + batch["Generation Save"][-1]["DNA Generated"] = x + + save_batch(batch, file_name) + + +# Exporter functions: +def get_batch_data(batch_to_generate, batch_json_save_path): + """ + Retrieves a given batches data determined by renderBatch in config.py + """ + + file_name = os.path.join(batch_json_save_path, "Batch{}.json".format(batch_to_generate)) + batch = json.load(open(file_name)) + + nfts_in_batch = batch["nfts_in_batch"] + hierarchy = batch["hierarchy"] + batch_dna_list = batch["batch_dna_list"] + + return nfts_in_batch, hierarchy, batch_dna_list + + +def render_and_save_nfts(input): + """ + Renders the NFT DNA in a Batch#.json, where # is renderBatch in config.py. Turns off the viewport camera and + the render camera for all items in hierarchy. 
+ """ + + time_start_1 = time.time() + + # If failed Batch is detected and user is resuming its generation: + if input.fail_state: + print(f"{TextColors.ERROR}\nResuming Batch #{input.failed_batch}\n{TextColors.RESET}") + nfts_in_batch, hierarchy, batch_dna_list = get_batch_data(input.failed_batch, input.batch_json_save_path) + for a in range(input.failed_dna): + del batch_dna_list[0] + x = input.failed_dna + 1 + + # If user is generating the normal way: + else: + print(f"\nGenerating Batch #{input.batchToGenerate}\n") + nfts_in_batch, hierarchy, batch_dna_list = get_batch_data(input.batchToGenerate, input.batch_json_save_path) + save_generation_state(input) + x = 1 + + if input.enableMaterials: + materials_file = json.load(open(input.materialsFile)) + + for a in batch_dna_list: + full_single_dna = list(a.keys())[0] + order_num = a[full_single_dna]['order_num'] + + # Material handling: + if input.enableMaterials: + single_dna, material_dna = full_single_dna.split(':') + + if not input.enableMaterials: + single_dna = full_single_dna + + def match_dna_to_variant(single_dna): + """ + Matches each DNA number separated by "-" to its attribute, then its variant. + """ + + list_attributes = list(hierarchy.keys()) + list_dna_deconstructed = single_dna.split('-') + dna_dictionary = {} + + for i, j in zip(list_attributes, list_dna_deconstructed): + dna_dictionary[i] = j + + for x in dna_dictionary: + for k in hierarchy[x]: + k_num = hierarchy[x][k]["number"] + if k_num == dna_dictionary[x]: + dna_dictionary.update({x: k}) + return dna_dictionary + + def match_material_dna_to_material(single_dna, material_dna, materials_file): + """ + Matches the Material DNA to it's selected Materials unless a 0 is present meaning no material for that variant was selected. 
+ """ + list_attributes = list(hierarchy.keys()) + list_dna_deconstructed = single_dna.split('-') + list_material_dna_deconstructed = material_dna.split('-') + + full_dna_dict = {} + + for attribute, variant, material in zip( + list_attributes, + list_dna_deconstructed, + list_material_dna_deconstructed + ): + + for var in hierarchy[attribute]: + if hierarchy[attribute][var]['number'] == variant: + variant = var + + if material != '0': # If material is not empty + for variant_m in materials_file: + if variant == variant_m: + # Getting Materials name from Materials index in the Materials List + materials_list = list(materials_file[variant_m]["Material List"].keys()) + + material = materials_list[int(material) - 1] # Subtract 1 because '0' means empty mat + break + + full_dna_dict[variant] = material + + return full_dna_dict + + metadata_material_dict = {} + + if input.enableMaterials: + material_dna_dictionary = match_material_dna_to_material(single_dna, material_dna, materials_file) + + for var_mat in list(material_dna_dictionary.keys()): + if material_dna_dictionary[var_mat]!='0': + if not materials_file[var_mat]['Variant Objects']: + """ + If objects to apply material to not specified, apply to all objects in Variant collection. + """ + metadata_material_dict[var_mat] = material_dna_dictionary[var_mat] + + for obj in bpy.data.collections[var_mat].all_objects: + selected_object = bpy.data.objects.get(obj.name) + selected_object.active_material = bpy.data.materials[material_dna_dictionary[var_mat]] + + if materials_file[var_mat]['Variant Objects']: + """ + If objects to apply material to are specified, apply material only to objects specified withing + the Variant collection. 
+ """ + metadata_material_dict[var_mat] = material_dna_dictionary[var_mat] + + for obj in materials_file[var_mat]['Variant Objects']: + selected_object = bpy.data.objects.get(obj) + selected_object.active_material = bpy.data.materials[material_dna_dictionary[var_mat]] + + # Turn off render camera and viewport camera for all collections in hierarchy + for i in hierarchy: + for j in hierarchy[i]: + try: + bpy.data.collections[j].hide_render = True + bpy.data.collections[j].hide_viewport = True + except KeyError: + raise TypeError( + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"The Collection '{j}' appears to be missing or has been renamed. If you made any changes to " + f"your .blend file scene, ensure you re-create your NFT Data so Blend_My_NFTs can read your " + f"scene. For more information see:{TextColors.RESET}" + f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + ) + + dna_dictionary = match_dna_to_variant(single_dna) + name = input.nftName + "_" + str(order_num) + + # Change Text Object in Scene to match DNA string: + # Variables that can be used: full_single_dna, name, order_num + # ob = bpy.data.objects['Text'] # Object name + # ob.data.body = str(f"DNA: {full_single_dna}") # Set text of Text Object ob + + print(f"\n{TextColors.OK}======== Generating NFT {x}/{nfts_in_batch}: {name} ========{TextColors.RESET}") + print(f"\nVariants selected:") + print(f"{dna_dictionary}") + if input.enableMaterials: + print(f"\nMaterials selected:") + print(f"{material_dna_dictionary}") + + print(f"\nDNA Code:{full_single_dna}") + + for c in dna_dictionary: + collection = dna_dictionary[c] + if collection != '0': + bpy.data.collections[collection].hide_render = False + bpy.data.collections[collection].hide_viewport = False + + time_start_2 = time.time() + + # Main paths for batch sub-folders: + batch_folder = os.path.join(input.nftBatch_save_path, "Batch" + str(input.batchToGenerate)) + + image_folder = os.path.join(batch_folder, 
"Images") + animation_folder = os.path.join(batch_folder, "Animations") + model_folder = os.path.join(batch_folder, "Models") + bmnft_data_folder = os.path.join(batch_folder, "BMNFT_data") + + image_path = os.path.join(image_folder, name) + animation_path = os.path.join(animation_folder, name) + model_path = os.path.join(model_folder, name) + + cardano_metadata_path = os.path.join(batch_folder, "Cardano_metadata") + solana_metadata_path = os.path.join(batch_folder, "Solana_metadata") + erc721_metadata_path = os.path.join(batch_folder, "Erc721_metadata") + + def check_failed_exists(file_path): + """ + Delete a file if a fail state is detected and if the file being re-generated already exists. Prevents + animations from corrupting. + """ + if input.fail_state: + if os.path.exists(file_path): + os.remove(file_path) + + # Generation/Rendering: + if input.enableImages: + + print(f"{TextColors.OK}-------- Image --------{TextColors.RESET}") + + image_render_time_start = time.time() + + check_failed_exists(image_path) + + def render_image(): + if not os.path.exists(image_folder): + os.makedirs(image_folder) + + bpy.context.scene.render.filepath = image_path + bpy.context.scene.render.image_settings.file_format = input.imageFileFormat + bpy.ops.render.render(write_still=True) + + # Loading Animation: + loading = Loader(f'Rendering Image {x}/{nfts_in_batch}...', '').start() + render_image() + loading.stop() + + image_render_time_end = time.time() + + print( + f"{TextColors.OK}Rendered image in {image_render_time_end - image_render_time_start}s." 
+ f"\n{TextColors.RESET}" + ) + + if input.enableAnimations: + print(f"{TextColors.OK}-------- Animation --------{TextColors.RESET}") + + animation_render_time_start = time.time() + + check_failed_exists(animation_path) + + def render_animation(): + if not os.path.exists(animation_folder): + os.makedirs(animation_folder) + + if input.animationFileFormat == "MP4": + bpy.context.scene.render.filepath = animation_path + bpy.context.scene.render.image_settings.file_format = "FFMPEG" + + bpy.context.scene.render.ffmpeg.format = 'MPEG4' + bpy.context.scene.render.ffmpeg.codec = 'H264' + bpy.ops.render.render(animation=True) + + elif input.animationFileFormat == 'PNG': + if not os.path.exists(animation_path): + os.makedirs(animation_path) + + bpy.context.scene.render.filepath = os.path.join(animation_path, name) + bpy.context.scene.render.image_settings.file_format = input.animationFileFormat + bpy.ops.render.render(animation=True) + + elif input.animationFileFormat == 'TIFF': + if not os.path.exists(animation_path): + os.makedirs(animation_path) + + bpy.context.scene.render.filepath = os.path.join(animation_path, name) + bpy.context.scene.render.image_settings.file_format = input.animationFileFormat + bpy.ops.render.render(animation=True) + + else: + bpy.context.scene.render.filepath = animation_path + bpy.context.scene.render.image_settings.file_format = input.animationFileFormat + bpy.ops.render.render(animation=True) + + # Loading Animation: + loading = Loader(f'Rendering Animation {x}/{nfts_in_batch}...', '').start() + render_animation() + loading.stop() + + animation_render_time_end = time.time() + + print( + f"{TextColors.OK}Rendered animation in {animation_render_time_end - animation_render_time_start}s." 
+ f"\n{TextColors.RESET}" + ) + + if input.enableModelsBlender: + print(f"{TextColors.OK}-------- 3D Model --------{TextColors.RESET}") + + model_generation_time_start = time.time() + + def generate_models(): + if not os.path.exists(model_folder): + os.makedirs(model_folder) + + for i in dna_dictionary: + coll = dna_dictionary[i] + if coll != '0': + for obj in bpy.data.collections[coll].all_objects: + obj.select_set(True) + + for obj in bpy.data.collections['Script_Ignore'].all_objects: + obj.select_set(True) + + # Remove objects from 3D model export: + # remove_objects: list = [ + # ] + # + # for obj in bpy.data.objects: + # if obj.name in remove_objects: + # obj.select_set(False) + + if input.modelFileFormat == 'GLB': + check_failed_exists(f"{model_path}.glb") + bpy.ops.export_scene.gltf( + filepath=f"{model_path}.glb", + check_existing=True, + export_format='GLB', + export_keep_originals=True, + use_selection=True + ) + if input.modelFileFormat == 'GLTF_SEPARATE': + check_failed_exists(f"{model_path}.gltf") + check_failed_exists(f"{model_path}.bin") + bpy.ops.export_scene.gltf( + filepath=f"{model_path}", + check_existing=True, + export_format='GLTF_SEPARATE', + export_keep_originals=True, + use_selection=True + ) + if input.modelFileFormat == 'GLTF_EMBEDDED': + check_failed_exists(f"{model_path}.gltf") + bpy.ops.export_scene.gltf( + filepath=f"{model_path}.gltf", + check_existing=True, + export_format='GLTF_EMBEDDED', + export_keep_originals=True, + use_selection=True + ) + elif input.modelFileFormat == 'FBX': + check_failed_exists(f"{model_path}.fbx") + bpy.ops.export_scene.fbx( + filepath=f"{model_path}.fbx", + check_existing=True, + use_selection=True + ) + elif input.modelFileFormat == 'OBJ': + check_failed_exists(f"{model_path}.obj") + bpy.ops.export_scene.obj( + filepath=f"{model_path}.obj", + check_existing=True, + use_selection=True, + ) + elif input.modelFileFormat == 'X3D': + check_failed_exists(f"{model_path}.x3d") + bpy.ops.export_scene.x3d( + 
filepath=f"{model_path}.x3d", + check_existing=True, + use_selection=True + ) + elif input.modelFileFormat == 'STL': + check_failed_exists(f"{model_path}.stl") + bpy.ops.export_mesh.stl( + filepath=f"{model_path}.stl", + check_existing=True, + use_selection=True + ) + elif input.modelFileFormat == 'VOX': + check_failed_exists(f"{model_path}.vox") + bpy.ops.export_vox.some_data(filepath=f"{model_path}.vox") + + # Loading Animation: + loading = Loader(f'Generating 3D model {x}/{nfts_in_batch}...', '').start() + generate_models() + loading.stop() + + model_generation_time_end = time.time() + + print( + f"{TextColors.OK}Generated 3D model in {model_generation_time_end - model_generation_time_start}s." + f"\n{TextColors.RESET}" + ) + + # Generating Metadata: + if input.cardanoMetaDataBool: + if not os.path.exists(cardano_metadata_path): + os.makedirs(cardano_metadata_path) + createCardanoMetadata( + name, + order_num, + full_single_dna, + dna_dictionary, + metadata_material_dict, + input.custom_Fields, + input.enableCustomFields, + input.cardano_description, + cardano_metadata_path + ) + + if input.solanaMetaDataBool: + if not os.path.exists(solana_metadata_path): + os.makedirs(solana_metadata_path) + createSolanaMetaData( + name, + order_num, + full_single_dna, + dna_dictionary, + metadata_material_dict, + input.custom_Fields, + input.enableCustomFields, + input.solana_description, + solana_metadata_path + ) + + if input.erc721MetaData: + if not os.path.exists(erc721_metadata_path): + os.makedirs(erc721_metadata_path) + createErc721MetaData( + name, + order_num, + full_single_dna, + dna_dictionary, + metadata_material_dict, + input.custom_Fields, + input.enableCustomFields, + input.erc721_description, + erc721_metadata_path + ) + + if not os.path.exists(bmnft_data_folder): + os.makedirs(bmnft_data_folder) + + for b in dna_dictionary: + if dna_dictionary[b] == "0": + dna_dictionary[b] = "Empty" + + meta_data_dict = { + "name": name, + "nft_dna": a, + "nft_variants": 
dna_dictionary, + "material_attributes": metadata_material_dict + } + + json_meta_data = json.dumps(meta_data_dict, indent=1, ensure_ascii=True) + + with open(os.path.join(bmnft_data_folder, "Data_" + name + ".json"), 'w') as outfile: + outfile.write(json_meta_data + '\n') + + print(f"Completed {name} render in {time.time() - time_start_2}s") + + save_completed(full_single_dna, a, x, input.batch_json_save_path, input.batchToGenerate) + + x += 1 + + for i in hierarchy: + for j in hierarchy[i]: + bpy.data.collections[j].hide_render = False + bpy.data.collections[j].hide_viewport = False + + batch_complete_time = time.time() - time_start_1 + + print(f"\nAll NFTs successfully generated and sent to {input.nftBatch_save_path}" + f"\nCompleted all renders in Batch{input.batchToGenerate}.json in {batch_complete_time}s\n") + + batch_info = {"Batch Render Time": batch_complete_time, "Number of NFTs generated in Batch": x - 1, + "Average time per generation": batch_complete_time / (x - 1)} + + batch_info_folder = os.path.join(input.nftBatch_save_path, "Batch" + str(input.batchToGenerate), "batch_info.json") + save_batch(batch_info, batch_info_folder) + + # Send Email that Batch is complete: + if input.emailNotificationBool: + port = 465 # For SSL + smtp_server = "smtp.gmail.com" + sender_email = input.sender_from # Enter your address + receiver_email = input.receiver_to # Enter receiver address + password = input.email_password + + # Get batch info for message: + if input.fail_state: + batch = input.failed_batch + batch_data = get_batch_data(input.failed_batch, input.batch_json_save_path) + + else: + batch_data = get_batch_data(input.batchToGenerate, input.batch_json_save_path) + + batch = input.batchToGenerate + + generation_time = str(datetime.timedelta(seconds=batch_complete_time)) + + message = f"""\ + Subject: Batch {batch} completed {x - 1} NFTs in {generation_time} (h:m:s) + + Generation Time: + {generation_time.split(':')[0]} Hours, + {generation_time.split(':')[1]} 
Minutes, + {generation_time.split(':')[2]} Seconds + Batch Data: + + {batch_data} + + This message was sent from an instance of the Blend_My_NFTs Blender add-on. + """ + + context = ssl.create_default_context() + with smtplib.SMTP_SSL(smtp_server, port, context=context) as server: + server.login(sender_email, password) + server.sendmail(sender_email, receiver_email, message) + + # Automatic Shutdown: + # If user selects automatic shutdown but did not specify time after Batch completion + def shutdown(time): + plateform = platform.system() + + if plateform == "Windows": + os.system(f"shutdown /s /t {time}") + if plateform == "Darwin": + os.system(f"shutdown /s /t {time}") + + if input.enableAutoShutdown and not input.specify_timeBool: + shutdown(0) + + # If user selects automatic shutdown and specify time after Batch completion + if input.enableAutoShutdown and input.specify_timeBool: + hours = (int(input.hours) / 60) / 60 + minutes = int(input.minutes) / 60 + total_sleep_time = hours + minutes + + # time.sleep(total_sleep_time) + + shutdown(total_sleep_time) diff --git a/main/HeadlessUtil.py b/main/headless_util.py similarity index 91% rename from main/HeadlessUtil.py rename to main/headless_util.py index 6b09303..6c4b283 100644 --- a/main/HeadlessUtil.py +++ b/main/headless_util.py @@ -1,6 +1,6 @@ -#adding CLI arguments -#Used this as a basis: -#https://developer.blender.org/diffusion/B/browse/master/release/scripts/templates_py/background_job.py +# adding CLI arguments +# Used this as a basis: +# https://developer.blender.org/diffusion/B/browse/master/release/scripts/templates_py/background_job.py import sys import argparse diff --git a/main/Helpers.py b/main/helpers.py similarity index 100% rename from main/Helpers.py rename to main/helpers.py diff --git a/main/Intermediate.py b/main/intermediate.py similarity index 95% rename from main/Intermediate.py rename to main/intermediate.py index 1dd6f43..323bf56 100644 --- a/main/Intermediate.py +++ 
b/main/intermediate.py @@ -1,7 +1,7 @@ import json import bpy -from main import DNA_Generator, Exporter +from main import dna_generator, exporter def send_To_Record_JSON(input, reverse_order=False): @@ -44,7 +44,7 @@ num += 1 - DNA_Generator.send_to_record(input.collectionSize, + dna_generator.send_to_record(input.collectionSize, input.nftsPerBatch, input.save_path, input.enableRarity, @@ -77,4 +77,4 @@ def render_and_save_NFTs(input, reverse_order=False): else: input.custom_Fields[item.field_name] = item.field_value - Exporter.render_and_save_NFTs(input) + exporter.render_and_save_nfts(input) diff --git a/main/Logic.py b/main/logic.py similarity index 98% rename from main/Logic.py rename to main/logic.py index 2cc316d..d76f6a4 100644 --- a/main/Logic.py +++ b/main/logic.py @@ -1,11 +1,11 @@ # Purpose: -# The purpose of this file is to add logic and rules to the DNA that are sent to the NFTRecord.json file in DNA_Generator.py +# The purpose of this file is to add logic and rules to the DNA that are sent to the NFTRecord.json file in dna_generator.py import bpy import random import collections -from .Helpers import TextColors, removeList, remove_file_by_extension, save_result +from .helpers import TextColors, removeList, remove_file_by_extension, save_result def reconstructDNA(deconstructedDNA): diff --git a/main/Material_Generator.py b/main/material_generator.py similarity index 98% rename from main/Material_Generator.py rename to main/material_generator.py index 8c52f35..e0eda71 100644 --- a/main/Material_Generator.py +++ b/main/material_generator.py @@ -7,7 +7,7 @@ import bpy import json import random -from .Helpers import TextColors +from .helpers import TextColors def select_material(materialList, variant, enableRarity): diff --git a/main/Metadata.py b/main/metadata_templates.py similarity index 98% rename from main/Metadata.py rename to main/metadata_templates.py index 618e028..2d7b8ca 100644 --- 
a/main/Metadata.py +++ b/main/metadata_templates.py @@ -3,7 +3,7 @@ # https://discord.gg/QTT7dzcuVs # Purpose: -# This file returns the specified meta data format to the Exporter.py for a given NFT DNA. +# This file returns the specified meta data format to the exporter.py for a given NFT DNA. import bpy import os diff --git a/main/Refactorer.py b/main/refactorer.py similarity index 96% rename from main/Refactorer.py rename to main/refactorer.py index 0faa07f..d842e05 100644 --- a/main/Refactorer.py +++ b/main/refactorer.py @@ -6,7 +6,7 @@ import os import json import shutil -from .Helpers import TextColors, removeList, remove_file_by_extension +from .helpers import TextColors, removeList, remove_file_by_extension def reformatNFTCollection(refactor_panel_input):