From cabefc04d1b9f68d553d6a2fa839ae2a10b2f66a Mon Sep 17 00:00:00 2001 From: Torrin Leonard <82110564+torrinworx@users.noreply.github.com> Date: Wed, 24 Aug 2022 08:56:10 -0400 Subject: [PATCH 01/12] Reformatting and preparing for debug mode Created Helpers.py file and combined get_combinations.py, loading_animation.py, Constants.py, and Checks.py. --- __init__.py | 26 +++-- main/Constants.py | 51 --------- main/DNA_Generator.py | 31 +++--- main/Exporter.py | 4 +- main/{Checks.py => Helpers.py} | 198 ++++++++++++++++++++++++++++++--- main/Intermediate.py | 3 +- main/Logic.py | 2 +- main/Material_Generator.py | 2 +- main/Rarity.py | 2 +- main/Refactorer.py | 2 +- main/get_combinations.py | 26 ----- main/loading_animation.py | 69 ------------ 12 files changed, 217 insertions(+), 199 deletions(-) delete mode 100644 main/Constants.py rename main/{Checks.py => Helpers.py} (60%) delete mode 100644 main/get_combinations.py delete mode 100644 main/loading_animation.py diff --git a/__init__.py b/__init__.py index 0217ead..35f978a 100644 --- a/__init__.py +++ b/__init__.py @@ -36,13 +36,11 @@ sys.path.append(os.path.dirname(os.path.realpath(__file__))) # Local file imports: from main import \ - Checks, \ + Helpers, \ DNA_Generator, \ Exporter, \ - get_combinations, \ HeadlessUtil, \ Intermediate, \ - loading_animation, \ Logic, \ Material_Generator, \ Metadata, \ @@ -55,12 +53,10 @@ from UILists import \ if "bpy" in locals(): modules = { - "Checks": Checks, + "Helpers": Helpers, "DNA_Generator": DNA_Generator, "Exporter": Exporter, - "get_combinations": get_combinations, "HeadlessUtil": HeadlessUtil, - "loading_animation": loading_animation, "Intermediate": Intermediate, "Logic": Logic, "Material_Generator": Material_Generator, @@ -75,9 +71,9 @@ if "bpy" in locals(): if i in locals(): importlib.reload(modules[i]) -# ======== Persistant UI Refresh ======== # - +# ======== Persistent UI Refresh ======== # # Used for updating text and buttons in UI panels + combinations: int = 
0 recommended_limit: int = 0 dt = datetime.now(timezone.utc).astimezone() # Date Time in UTC local @@ -92,7 +88,7 @@ def Refresh_UI(dummy1, dummy2): global combinations global recommended_limit - combinations = (get_combinations.get_combinations()) + combinations = (Helpers.get_combinations()) recommended_limit = int(round(combinations / 2)) # Add panel classes that require refresh to this refresh_panels tuple: @@ -165,6 +161,7 @@ class BMNFTData: sender_from: str email_password: str receiver_to: str + enable_debug: bool custom_Fields: dict = None fail_state: Any = False @@ -229,6 +226,7 @@ def getBMNFTData(): sender_from=bpy.context.scene.input_tool.sender_from, email_password=bpy.context.scene.input_tool.email_password, receiver_to=bpy.context.scene.input_tool.receiver_to, + enable_debug=bpy.context.scene.input_tool.enable_debug ) return data @@ -464,7 +462,6 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): enableAutoSave: bpy.props.BoolProperty(name="Auto Save Before Generation", description="Automatically saves your Blender file when 'Generate NFTs & Create Metadata' button is clicked") - # Auto Shutdown: enableAutoShutdown: bpy.props.BoolProperty(name="Auto Shutdown", description="Automatically shuts down your computer after a Batch is finished Generating") @@ -473,15 +470,16 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): hours: bpy.props.IntProperty(default=0, min=0) minutes: bpy.props.IntProperty(default=0, min=0) - # Send Batch Complete Email: emailNotificationBool: bpy.props.BoolProperty(name="Email Notifications", description="Receive Email Notifications from Blender once a batch is finished generating") sender_from: bpy.props.StringProperty(name="From", default="from@example.com") email_password: bpy.props.StringProperty(name="Password", subtype='PASSWORD') receiver_to: bpy.props.StringProperty(name="To", default="to@example.com") + enable_debug: bpy.props.BoolProperty(name="Enable Debug Mode", description="Allows you to run 
Blend_My_NFTs without generating any content files and includes more console information.") + # API Panel properties: - apiKey: bpy.props.StringProperty(name="API Key", subtype='PASSWORD') # Test code for future faetures + apiKey: bpy.props.StringProperty(name="API Key", subtype='PASSWORD') # Test code for future features # ======== Main Operators ======== # @@ -602,6 +600,7 @@ class resume_failed_batch(bpy.types.Operator): sender_from=render_settings["sender_from"], email_password=render_settings["email_password"], receiver_to=render_settings["receiver_to"], + enable_debug=render_settings["enable_debug"], fail_state=_fail_state, failed_batch=_failed_batch, @@ -1000,6 +999,9 @@ class BMNFTS_PT_Other(bpy.types.Panel): row = layout.row() layout.label(text=f"**Set a Save Path in Create NFT Data to Export Settings") + row = layout.row() + row.prop(input_tool_scene, "enable_debug") + row = layout.row() row = layout.row() diff --git a/main/Constants.py b/main/Constants.py deleted file mode 100644 index 3cc94c3..0000000 --- a/main/Constants.py +++ /dev/null @@ -1,51 +0,0 @@ -# Purpose: -# This file is for storing or updating constant values that may need to be changes depending on system requirements and -# different usecases. -import os -import json -import platform - -removeList = [".gitignore", ".DS_Store", "desktop.ini", ".ini"] - -def remove_file_by_extension(dirlist): - """ - Checks if a given directory list contains any of the files or file extensions listed above, if so, remove them from - list and return a clean dir list. These files interfer with BMNFTs operations and should be removed whenever dealing - with directories. 
- """ - - if str(type(dirlist)) == "": - dirlist = list(dirlist) # converts single string path to list if dir pasted as string - - return_dirs = [] - for directory in dirlist: - if not str(os.path.split(directory)[1]) in removeList: - return_dirs.append(directory) - - return return_dirs - - -class bcolors: - """ - The colour of console messages. - """ - - OK = '\033[92m' # GREEN - WARNING = '\033[93m' # YELLOW - ERROR = '\033[91m' # RED - RESET = '\033[0m' # RESET COLOR - -def save_result(result): - """ - Saves json result to json file at the specified path. - """ - file_name = "log.json" - if platform.system() == "Linux" or platform.system() == "Darwin": - path = os.path.join(os.path.join(os.path.expanduser('~')), 'Desktop', file_name) - - if platform.system() == "Windows": - path = os.path.join(os.environ["HOMEPATH"], "Desktop", file_name) - - data = json.dumps(result, indent=1, ensure_ascii=True) - with open(path, 'w') as outfile: - outfile.write(data + '\n') diff --git a/main/DNA_Generator.py b/main/DNA_Generator.py index d045e2f..1a76be5 100644 --- a/main/DNA_Generator.py +++ b/main/DNA_Generator.py @@ -3,15 +3,13 @@ import bpy import os -import re import copy import time import json import random from functools import partial -from .loading_animation import Loader -from . import Rarity, Logic, Checks, Material_Generator -from .Constants import bcolors, removeList, remove_file_by_extension +from . import Rarity, Logic, Material_Generator, Helpers +from .Helpers import bcolors, Loader def get_hierarchy(): @@ -124,10 +122,10 @@ def get_hierarchy(): return hierarchy -def generateNFT_DNA(collectionSize, enableRarity, enableLogic, logicFile, enableMaterials, materialsFile): +def generateNFT_DNA(collectionSize, enableRarity, enableLogic, logicFile, enableMaterials, materialsFile, enable_debug): + """ + Returns batchDataDictionary containing the number of NFT combinations, hierarchy, and the DNAList. 
""" - Returns batchDataDictionary containing the number of NFT combinations, hierarchy, and the DNAList. - """ hierarchy = get_hierarchy() @@ -164,7 +162,6 @@ def generateNFT_DNA(collectionSize, enableRarity, enableLogic, logicFile, enable """ singleDNA = "" - # Comments for debugging random, rarity, logic, and materials. if not enableRarity: singleDNA = createDNArandom(hierarchy) # print("============") @@ -214,7 +211,7 @@ def generateNFT_DNA(collectionSize, enableRarity, enableLogic, logicFile, enable # Messages: - Checks.raise_Warning_collectionSize(DNAList, collectionSize) + Helpers.raise_Warning_collectionSize(DNAList, collectionSize) # Data stored in batchDataDictionary: DataDictionary["numNFTsGenerated"] = len(DNAList) @@ -281,7 +278,7 @@ def makeBatches(collectionSize, nftsPerBatch, save_path, batch_json_save_path): def send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, enableLogic, logicFile, enableMaterials, - materialsFile, Blend_My_NFTs_Output, batch_json_save_path): + materialsFile, Blend_My_NFTs_Output, batch_json_save_path, enable_debug): """ Creates NFTRecord.json file and sends "batchDataDictionary" to it. NFTRecord.json is a permanent record of all DNA you've generated with all attribute variants. 
If you add new variants or attributes to your .blend file, other scripts @@ -290,7 +287,7 @@ def send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, e """ # Checking Scene is compatible with BMNFTs: - Checks.check_Scene() + Helpers.check_Scene() # Messages: print( @@ -313,18 +310,18 @@ def send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, e def create_nft_data(): try: DataDictionary = generateNFT_DNA(collectionSize, enableRarity, enableLogic, logicFile, enableMaterials, - materialsFile) + materialsFile, enable_debug) NFTRecord_save_path = os.path.join(Blend_My_NFTs_Output, "NFTRecord.json") # Checks: - Checks.raise_Warning_maxNFTs(nftsPerBatch, collectionSize) - Checks.check_Duplicates(DataDictionary["DNAList"]) - Checks.raise_Error_ZeroCombinations() + Helpers.raise_Warning_maxNFTs(nftsPerBatch, collectionSize) + Helpers.check_Duplicates(DataDictionary["DNAList"]) + Helpers.raise_Error_ZeroCombinations() if enableRarity: - Checks.check_Rarity(DataDictionary["hierarchy"], DataDictionary["DNAList"], - os.path.join(save_path, "Blend_My_NFTs Output/NFT_Data")) + Helpers.check_Rarity(DataDictionary["hierarchy"], DataDictionary["DNAList"], + os.path.join(save_path, "Blend_My_NFTs Output/NFT_Data")) except FileNotFoundError: raise FileNotFoundError( diff --git a/main/Exporter.py b/main/Exporter.py index 76a67cb..6ed5fec 100644 --- a/main/Exporter.py +++ b/main/Exporter.py @@ -10,8 +10,8 @@ import json import smtplib import datetime import platform -from .loading_animation import Loader -from .Constants import bcolors, removeList, remove_file_by_extension + +from .Helpers import bcolors, Loader from .Metadata import createCardanoMetadata, createSolanaMetaData, createErc721MetaData diff --git a/main/Checks.py b/main/Helpers.py similarity index 60% rename from main/Checks.py rename to main/Helpers.py index ab50e7c..d50984d 100644 --- a/main/Checks.py +++ b/main/Helpers.py @@ -1,21 +1,122 @@ -# Purpose: -# The purpose of this 
file is to check the NFTRecord.json for duplicate NFT DNA and returns any found in the console. -# It also checks the percentage each variant is chosen in the NFTRecord, then compares it with its rarity percentage -# set in the .blend file. - -# This file is provided for transparency. The accuracy of the rarity values you set in your .blend file as outlined in -# the README.md file are dependent on the maxNFTs, and the maximum number of combinations of your NFT collection. - import bpy import os import json +import platform +from time import sleep +from itertools import cycle +from threading import Thread +from shutil import get_terminal_size from collections import Counter, defaultdict -from . import DNA_Generator, get_combinations -from .Constants import bcolors, removeList, remove_file_by_extension +from . import DNA_Generator -# Checks: +# ======== CONSTANTS ======== # + +# This section is used for debugging, coding, or general testing purposes. + + +def enable_debug(enable_debug_bool): + if enable_debug_bool: + import logging + + logging.basicConfig( + filename="./log.txt", + level=logging.DEBUG, + format='[%(levelname)s][%(asctime)s]\n%(message)s\n', + datefmt='%Y-%m-%d %H:%M:%S' + ) + + +# ======== CONSTANTS ======== # + +# Constants are used for storing or updating constant values that may need to be changes depending on system +# requirements and different use-cases. + +removeList = [".gitignore", ".DS_Store", "desktop.ini", ".ini"] + + +def remove_file_by_extension(dirlist): + """ + Checks if a given directory list contains any of the files or file extensions listed above, if so, remove them from + list and return a clean dir list. These files interfer with BMNFTs operations and should be removed whenever dealing + with directories. 
+ """ + + if str(type(dirlist)) == "": + dirlist = list(dirlist) # converts single string path to list if dir pasted as string + + return_dirs = [] + for directory in dirlist: + if not str(os.path.split(directory)[1]) in removeList: + return_dirs.append(directory) + + return return_dirs + + +class bcolors: + """ + The colour of console messages. + """ + + OK = '\033[92m' # GREEN + WARNING = '\033[93m' # YELLOW + ERROR = '\033[91m' # RED + RESET = '\033[0m' # RESET COLOR + + +def save_result(result): + """ + Saves json result to json file at the specified path. + """ + file_name = "log.json" + if platform.system() == "Linux" or platform.system() == "Darwin": + path = os.path.join(os.path.join(os.path.expanduser('~')), 'Desktop', file_name) + + if platform.system() == "Windows": + path = os.path.join(os.environ["HOMEPATH"], "Desktop", file_name) + + data = json.dumps(result, indent=1, ensure_ascii=True) + with open(path, 'w') as outfile: + outfile.write(data + '\n') + + +# ======== GET COMBINATIONS ======== # + +# This section is used to get the number of combinations for checks and the UI display + +def get_combinations(): + """ + Returns "combinations", the number of all possible NFT DNA for a given Blender scene formatted to BMNFTs conventions + combinations. + """ + + hierarchy = DNA_Generator.get_hierarchy() + hierarchyByNum = [] + + for i in hierarchy: + # Ignore Collections with nothing in them + if len(hierarchy[i]) != 0: + hierarchyByNum.append(len(hierarchy[i])) + else: + print(f"The following collection has been identified as empty: {i}") + + combinations = 1 + for i in hierarchyByNum: + combinations = combinations * i + + return combinations + + +# ======== CHECKS ======== # + +# This section is used to check the NFTRecord.json for duplicate NFT DNA and returns any found in the console. +# It also checks the percentage each variant is chosen in the NFTRecord, then compares it with its rarity percentage +# set in the .blend file. 
+ +# This section is provided for transparency. The accuracy of the rarity values you set in your .blend file as outlined +# in the README.md file are dependent on the maxNFTs, and the maximum number of combinations of your NFT collection. + def check_Scene(): # Not complete """ Checks if Blender file Scene follows the Blend_My_NFTs conventions. If not, raises error with all instances of @@ -44,6 +145,7 @@ def check_Scene(): # Not complete # attribute_naming_conventions + def check_Rarity(hierarchy, DNAListFormatted, save_path): """Checks rarity percentage of each Variant, then sends it to RarityData.json in NFT_Data folder.""" @@ -51,7 +153,6 @@ def check_Rarity(hierarchy, DNAListFormatted, save_path): for i in DNAListFormatted: DNAList.append(list(i.keys())[0]) - numNFTsGenerated = len(DNAList) numDict = defaultdict(list) @@ -90,7 +191,7 @@ def check_Rarity(hierarchy, DNAListFormatted, save_path): if l == k: name = fullNumName[i][k] num = numDict[j][l] - x[name] = [(str(round(((num/numNFTsGenerated)*100), 2)) + "%"), str(num)] + x[name] = [(str(round(((num / numNFTsGenerated) * 100), 2)) + "%"), str(num)] completeData[i] = x @@ -112,13 +213,13 @@ def check_Rarity(hierarchy, DNAListFormatted, save_path): path = os.path.join(save_path, "RarityData.json") print(bcolors.OK + f"Rarity Data has been saved to {path}." 
+ bcolors.RESET) + def check_Duplicates(DNAListFormatted): """Checks if there are duplicates in DNAList before NFTRecord.json is sent to JSON file.""" DNAList = [] for i in DNAListFormatted: DNAList.append(list(i.keys())[0]) - duplicates = 0 seen = set() @@ -130,6 +231,7 @@ def check_Duplicates(DNAListFormatted): print(f"\nNFTRecord.json contains {duplicates} duplicate NFT DNA.") + def check_FailedBatches(batch_json_save_path): fail_state = False failed_batch = None @@ -151,6 +253,7 @@ def check_FailedBatches(batch_json_save_path): return fail_state, failed_batch, failed_dna, failed_dna_index + # Raise Errors: def raise_Error_numBatches(maxNFTs, nftsPerBatch): """Checks if number of Batches is less than maxNFTs, if not raises error.""" @@ -168,9 +271,10 @@ def raise_Error_numBatches(maxNFTs, nftsPerBatch): f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{bcolors.RESET}" ) + def raise_Error_ZeroCombinations(): """Checks if combinations is greater than 0, if so, raises error.""" - if get_combinations.get_combinations() == 0: + if get_combinations() == 0: raise ValueError( f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" f"The number of all possible combinations is ZERO. Please review your Blender scene and ensure it follows " @@ -179,6 +283,7 @@ def raise_Error_ZeroCombinations(): f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{bcolors.RESET}" ) + def raise_Error_numBatchesGreaterThan(numBatches): if numBatches < 1: raise ValueError( @@ -189,8 +294,8 @@ def raise_Error_numBatchesGreaterThan(numBatches): f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{bcolors.RESET}" ) -# Raise Warnings: +# Raise Warnings: def raise_Warning_maxNFTs(nftsPerBatch, collectionSize): """ Prints warning if nftsPerBatch is greater than collectionSize. 
@@ -202,6 +307,7 @@ def raise_Warning_maxNFTs(nftsPerBatch, collectionSize): f"The number of NFTs Per Batch you set is smaller than the NFT Collection Size you set.\n{bcolors.RESET}" ) + def raise_Warning_collectionSize(DNAList, collectionSize): """ Prints warning if BMNFTs cannot generate requested number of NFTs from a given collectionSize. @@ -211,9 +317,67 @@ def raise_Warning_collectionSize(DNAList, collectionSize): print(f"\n{bcolors.WARNING} \nWARNING: \n" f"Blend_My_NFTs cannot generate {collectionSize} NFTs." f" Only {len(DNAList)} NFT DNA were generated." - + f"\nThis might be for a number of reasons:" f"\n a) Rarity is preventing combinations from being generated (See https://github.com/torrinworx/Blend_My_NFTs#notes-on-rarity-and-weighted-variants).\n" f"\n b) Logic is preventing combinations from being generated (See https://github.com/torrinworx/Blend_My_NFTs#logic).\n" f"\n c) The number of possible combinations of your NFT collection is too low. Add more Variants or Attributes to increase the recommended collection size.\n" f"\n{bcolors.RESET}") + + +# ======== LOADING ANIMATION ======== # + +# This section is used for the loading animation used in the system console. + +class Loader: + def __init__(self, desc="Loading...", end="Done!", timeout=0.1): + """ + A loader-like context manager + + Args: + desc (str, optional): The loader's description. Defaults to "Loading...". + end (str, optional): Final print. Defaults to "Done!". + timeout (float, optional): Sleep time between prints. Defaults to 0.1. 
+ """ + self.desc = desc + self.end = end + self.timeout = timeout + + self._thread = Thread(target=self._animate, daemon=True) + self.steps = [ + " [== ]", + " [ == ]", + " [ == ]", + " [ == ]", + " [ == ]", + " [ ==]", + " [ == ]", + " [ == ]", + " [ == ]", + " [ == ]", + ] + self.done = False + + def start(self): + self._thread.start() + return self + + def _animate(self): + for c in cycle(self.steps): + if self.done: + break + print(f"\r{self.desc} {c}", flush=True, end="") + sleep(self.timeout) + + def __enter__(self): + self.start() + + def stop(self): + self.done = True + cols = get_terminal_size((80, 20)).columns + print("\r" + " " * cols, end="", flush=True) + print(f"\r{self.end}", flush=True) + + def __exit__(self, exc_type, exc_value, tb): + # handle exceptions with those variables ^ + self.stop() diff --git a/main/Intermediate.py b/main/Intermediate.py index a5479e1..6068a47 100644 --- a/main/Intermediate.py +++ b/main/Intermediate.py @@ -53,7 +53,8 @@ def send_To_Record_JSON(input, reverse_order=False): input.enableMaterials, input.materialsFile, input.Blend_My_NFTs_Output, - input.batch_json_save_path + input.batch_json_save_path, + input.enable_debug, ) diff --git a/main/Logic.py b/main/Logic.py index d425c70..006b5bc 100644 --- a/main/Logic.py +++ b/main/Logic.py @@ -5,7 +5,7 @@ import bpy import random import collections -from .Constants import bcolors, removeList, remove_file_by_extension, save_result +from .Helpers import bcolors, removeList, remove_file_by_extension, save_result def reconstructDNA(deconstructedDNA): diff --git a/main/Material_Generator.py b/main/Material_Generator.py index c5866ad..0218838 100644 --- a/main/Material_Generator.py +++ b/main/Material_Generator.py @@ -7,7 +7,7 @@ import bpy import json import random -from .Constants import bcolors, removeList, remove_file_by_extension, save_result +from .Helpers import bcolors, removeList, remove_file_by_extension, save_result def select_material(materialList, variant, 
enableRarity): diff --git a/main/Rarity.py b/main/Rarity.py index eff5c7c..b806f3f 100644 --- a/main/Rarity.py +++ b/main/Rarity.py @@ -4,7 +4,7 @@ import bpy import random -from .Constants import bcolors, removeList, remove_file_by_extension +from .Helpers import bcolors, removeList, remove_file_by_extension def createDNArarity(hierarchy): diff --git a/main/Refactorer.py b/main/Refactorer.py index 6566753..98eb970 100644 --- a/main/Refactorer.py +++ b/main/Refactorer.py @@ -6,7 +6,7 @@ import os import json import shutil -from .Constants import bcolors, removeList, remove_file_by_extension +from .Helpers import bcolors, removeList, remove_file_by_extension def reformatNFTCollection(refactor_panel_input): diff --git a/main/get_combinations.py b/main/get_combinations.py deleted file mode 100644 index f1db7eb..0000000 --- a/main/get_combinations.py +++ /dev/null @@ -1,26 +0,0 @@ -import bpy - -from . import DNA_Generator - - -def get_combinations(): - """ - Returns "combinations", the number of all possible NFT DNA for a given Blender scene formatted to BMNFTs conventions - combinations. - """ - - hierarchy = DNA_Generator.get_hierarchy() - hierarchyByNum = [] - - for i in hierarchy: - # Ignore Collections with nothing in them - if len(hierarchy[i]) != 0: - hierarchyByNum.append(len(hierarchy[i])) - else: - print(f"The following collection has been identified as empty: {i}") - - combinations = 1 - for i in hierarchyByNum: - combinations = combinations*i - - return combinations diff --git a/main/loading_animation.py b/main/loading_animation.py deleted file mode 100644 index d97e69d..0000000 --- a/main/loading_animation.py +++ /dev/null @@ -1,69 +0,0 @@ -from itertools import cycle -from shutil import get_terminal_size -from threading import Thread -from time import sleep - - -class Loader: - def __init__(self, desc="Loading...", end="Done!", timeout=0.1): - """ - A loader-like context manager - - Args: - desc (str, optional): The loader's description. 
Defaults to "Loading...". - end (str, optional): Final print. Defaults to "Done!". - timeout (float, optional): Sleep time between prints. Defaults to 0.1. - """ - self.desc = desc - self.end = end - self.timeout = timeout - - self._thread = Thread(target=self._animate, daemon=True) - self.steps = [ - " [== ]", - " [ == ]", - " [ == ]", - " [ == ]", - " [ == ]", - " [ ==]", - " [ == ]", - " [ == ]", - " [ == ]", - " [ == ]", - ] - self.done = False - - def start(self): - self._thread.start() - return self - - def _animate(self): - for c in cycle(self.steps): - if self.done: - break - print(f"\r{self.desc} {c}", flush=True, end="") - sleep(self.timeout) - - def __enter__(self): - self.start() - - def stop(self): - self.done = True - cols = get_terminal_size((80, 20)).columns - print("\r" + " " * cols, end="", flush=True) - print(f"\r{self.end}", flush=True) - - def __exit__(self, exc_type, exc_value, tb): - # handle exceptions with those variables ^ - self.stop() - - -if __name__ == "__main__": - with Loader("Loading with context manager..."): - for i in range(10): - sleep(0.25) - - loader = Loader("Loading with object...", "That was fast!", 0.05).start() - for i in range(10): - sleep(0.25) - loader.stop() From 5e353e42b6dfaa5e26b6b77a3297fc322d7bffc2 Mon Sep 17 00:00:00 2001 From: Torrin Leonard <82110564+torrinworx@users.noreply.github.com> Date: Wed, 24 Aug 2022 09:06:40 -0400 Subject: [PATCH 02/12] Reformatting print statements --- main/Exporter.py | 14 ++++++++++---- main/Helpers.py | 2 +- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/main/Exporter.py b/main/Exporter.py index 6ed5fec..cb91aa3 100644 --- a/main/Exporter.py +++ b/main/Exporter.py @@ -137,7 +137,7 @@ def render_and_save_NFTs(input): # If failed Batch is detected and user is resuming its generation: if input.fail_state: - print(f"{bcolors.ERROR}\nResuming Failed Batch {input.failed_batch}\n{bcolors.RESET}") + print(f"{bcolors.ERROR}\nResuming Batch 
#{input.failed_batch}\n{bcolors.RESET}") NFTs_in_Batch, hierarchy, BatchDNAList = getBatchData(input.failed_batch, input.batch_json_save_path) for a in range(input.failed_dna): del BatchDNAList[0] @@ -145,7 +145,7 @@ def render_and_save_NFTs(input): # If user is generating the normal way: else: - print(f"\nGenerating Batch {input.batchToGenerate}\n") + print(f"\nGenerating Batch #{input.batchToGenerate}\n") NFTs_in_Batch, hierarchy, BatchDNAList = getBatchData(input.batchToGenerate, input.batch_json_save_path) save_generation_state(input) x = 1 @@ -262,8 +262,14 @@ def render_and_save_NFTs(input): # ob = bpy.data.objects['Text'] # Object name # ob.data.body = str(f"DNA: {full_single_dna}") # Set text of Text Object ob - print(f"\n{bcolors.OK}|--- Generating NFT {x}/{NFTs_in_Batch}: {name} ---|{bcolors.RESET}") - print(f"DNA attribute list:\n{dnaDictionary}\nDNA Code:{single_dna}") + print(f"\n{bcolors.OK}======== Generating NFT {x}/{NFTs_in_Batch}: {name} ========{bcolors.RESET}") + print(f"Variants selected:") + print(f"{dnaDictionary}") + if input.enableMaterials: + print(f"Materials selected:") + print(f"{materialdnaDictionary}") + + print(f"DNA Code:{full_single_dna}") for c in dnaDictionary: collection = dnaDictionary[c] diff --git a/main/Helpers.py b/main/Helpers.py index d50984d..b85fd81 100644 --- a/main/Helpers.py +++ b/main/Helpers.py @@ -11,7 +11,7 @@ from collections import Counter, defaultdict from . import DNA_Generator -# ======== CONSTANTS ======== # +# ======== ENABLE DEBUG ======== # # This section is used for debugging, coding, or general testing purposes. 
From 4a939bb9e65bac7ce33cb2028854f04f7bcdc6db Mon Sep 17 00:00:00 2001 From: Torrin Leonard <82110564+torrinworx@users.noreply.github.com> Date: Wed, 24 Aug 2022 09:57:59 -0400 Subject: [PATCH 03/12] Reformatting dependencies - Deprecated Rarity.py, moved to Helpers to avoid circular import issues - Moved get_hierarchy() function from DNA_Generator.py to Helpers to avoid circular import issues - Reformatted some comments and example code --- __init__.py | 6 +- main/DNA_Generator.py | 182 +++++++++++++----------------------------- main/Exporter.py | 4 +- main/Helpers.py | 123 ++++++++++++++++++++++++++-- main/Logic.py | 5 +- main/Rarity.py | 50 ------------ 6 files changed, 182 insertions(+), 188 deletions(-) delete mode 100644 main/Rarity.py diff --git a/__init__.py b/__init__.py index 35f978a..53eca7b 100644 --- a/__init__.py +++ b/__init__.py @@ -44,7 +44,6 @@ from main import \ Logic, \ Material_Generator, \ Metadata, \ - Rarity, \ Refactorer from UILists import \ @@ -61,7 +60,6 @@ if "bpy" in locals(): "Logic": Logic, "Material_Generator": Material_Generator, "Metadata": Metadata, - "Rarity": Rarity, "Refactorer": Refactorer, "Custom_Metadata_UIList": Custom_Metadata_UIList, "Logic_UIList": Logic_UIList, @@ -547,7 +545,7 @@ class resume_failed_batch(bpy.types.Operator): file_name = os.path.join(_batch_json_save_path, "Batch{}.json".format(_batchToGenerate)) batchData = json.load(open(file_name)) - _fail_state, _failed_batch, _failed_dna, _failed_dna_index = Checks.check_FailedBatches(_batch_json_save_path) + _fail_state, _failed_batch, _failed_dna, _failed_dna_index = Helpers.check_FailedBatches(_batch_json_save_path) render_settings = batchData["Generation Save"][-1]["Render_Settings"] @@ -887,7 +885,7 @@ class BMNFTS_PT_GenerateNFTs(bpy.types.Panel): batch_json_save_path = os.path.join(Blend_My_NFTs_Output, "Batch_Data") nftBatch_save_path = os.path.join(save_path, "Blend_My_NFTs Output", "Generated NFT Batches") - fail_state, failed_batch, failed_dna, 
failed_dna_index = Checks.check_FailedBatches(batch_json_save_path) + fail_state, failed_batch, failed_dna, failed_dna_index = Helpers.check_FailedBatches(batch_json_save_path) if fail_state: row = layout.row() diff --git a/main/DNA_Generator.py b/main/DNA_Generator.py index 1a76be5..3d8fe3a 100644 --- a/main/DNA_Generator.py +++ b/main/DNA_Generator.py @@ -3,123 +3,11 @@ import bpy import os -import copy import time import json import random from functools import partial -from . import Rarity, Logic, Material_Generator, Helpers -from .Helpers import bcolors, Loader - - -def get_hierarchy(): - """ - Returns the hierarchy of a given Blender scene. - """ - - coll = bpy.context.scene.collection - - scriptIgnoreCollection = bpy.data.collections["Script_Ignore"] - - listAllCollInScene = [] - listAllCollections = [] - - def traverse_tree(t): - yield t - for child in t.children: - yield from traverse_tree(child) - - for c in traverse_tree(coll): - listAllCollInScene.append(c) - - for i in listAllCollInScene: - listAllCollections.append(i.name) - - listAllCollections.remove(scriptIgnoreCollection.name) - - if "Scene Collection" in listAllCollections: - listAllCollections.remove("Scene Collection") - - if "Master Collection" in listAllCollections: - listAllCollections.remove("Master Collection") - - def allScriptIgnore(scriptIgnoreCollection): - # Removes all collections, sub collections in Script_Ignore collection from listAllCollections. - - for coll in list(scriptIgnoreCollection.children): - listAllCollections.remove(coll.name) - listColl = list(coll.children) - if len(listColl) > 0: - allScriptIgnore(coll) - - allScriptIgnore(scriptIgnoreCollection) - listAllCollections.sort() - - exclude = ["_"] # Excluding characters that identify a Variant - attributeCollections = copy.deepcopy(listAllCollections) - - def filter_num(): - """ - This function removes items from 'attributeCollections' if they include values from the 'exclude' variable. 
- It removes child collections from the parent collections in from the "listAllCollections" list. - """ - for x in attributeCollections: - if any(a in x for a in exclude): - attributeCollections.remove(x) - - for i in range(len(listAllCollections)): - filter_num() - - attributeVariants = [x for x in listAllCollections if x not in attributeCollections] - attributeCollections1 = copy.deepcopy(attributeCollections) - - def attributeData(attributeVariants): - """ - Creates a dictionary of each attribute - """ - allAttDataList = {} - for i in attributeVariants: - # Check if name follows naming conventions: - if int(i.count("_")) > 2 and int(i.split("_")[1]) > 0: - raise Exception( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"There is a naming issue with the following Attribute/Variant: '{i}'\n" - f"Review the naming convention of Attribute and Variant collections here:\n{bcolors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" - ) - - try: - number = i.split("_")[1] - name = i.split("_")[0] - rarity = i.split("_")[2] - except IndexError: - raise Exception( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"There is a naming issue with the following Attribute/Variant: '{i}'\n" - f"Review the naming convention of Attribute and Variant collections here:\n{bcolors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" - ) - - allAttDataList[i] = {"name": name, "number": number, "rarity": rarity} - return allAttDataList - - variantMetaData = attributeData(attributeVariants) - - hierarchy = {} - for i in attributeCollections1: - colParLong = list(bpy.data.collections[str(i)].children) - colParShort = {} - for x in colParLong: - colParShort[x.name] = None - hierarchy[i] = colParShort - - for a in hierarchy: - for b in hierarchy[a]: - for x in variantMetaData: - if str(x) == str(b): - (hierarchy[a])[b] = variantMetaData[x] - - return hierarchy +from . 
import Logic, Material_Generator, Helpers def generateNFT_DNA(collectionSize, enableRarity, enableLogic, logicFile, enableMaterials, materialsFile, enable_debug): @@ -127,7 +15,7 @@ def generateNFT_DNA(collectionSize, enableRarity, enableLogic, logicFile, enable Returns batchDataDictionary containing the number of NFT combinations, hierarchy, and the DNAList. """ - hierarchy = get_hierarchy() + hierarchy = Helpers.get_hierarchy() # DNA random, Rarity and Logic methods: DataDictionary = {} @@ -156,6 +44,48 @@ def generateNFT_DNA(collectionSize, enableRarity, enableLogic, logicFile, enable return str(dna) + def createDNArarity(hierarchy): + """ + Sorts through DataDictionary and appropriately weights each variant based on their rarity percentage set in Blender + ("rarity" in DNA_Generator). Then + """ + singleDNA = "" + + for i in hierarchy: + number_List_Of_i = [] + rarity_List_Of_i = [] + ifZeroBool = None + + for k in hierarchy[i]: + number = hierarchy[i][k]["number"] + number_List_Of_i.append(number) + + rarity = hierarchy[i][k]["rarity"] + rarity_List_Of_i.append(float(rarity)) + + for x in rarity_List_Of_i: + if x == 0: + ifZeroBool = True + elif x != 0: + ifZeroBool = False + + try: + if ifZeroBool: + variantByNum = random.choices(number_List_Of_i, k=1) + elif not ifZeroBool: + variantByNum = random.choices(number_List_Of_i, weights=rarity_List_Of_i, k=1) + except IndexError: + raise IndexError( + f"\n{Helpers.bcolors.ERROR}Blend_My_NFTs Error:\n" + f"An issue was found within the Attribute collection '{i}'. 
For more information on Blend_My_NFTs compatible scenes, " + f"see:\n{Helpers.bcolors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + ) + + singleDNA += "-" + str(variantByNum[0]) + singleDNA = ''.join(singleDNA.split('-', 1)) + return singleDNA + def singleCompleteDNA(): """ This function applies Rarity and Logic to a single DNA created by createDNASingle() if Rarity or Logic specified @@ -167,7 +97,7 @@ def generateNFT_DNA(collectionSize, enableRarity, enableLogic, logicFile, enable # print("============") # print(f"Original DNA: {singleDNA}") if enableRarity: - singleDNA = Rarity.createDNArarity(hierarchy) + singleDNA = createDNArarity(hierarchy) # print(f"Rarity DNA: {singleDNA}") if enableLogic: @@ -297,13 +227,13 @@ def send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, e if not enableRarity and not enableLogic: print( - f"{bcolors.OK}NFT DNA will be determined randomly, no special properties or parameters are applied.\n{bcolors.RESET}") + f"{Helpers.bcolors.OK}NFT DNA will be determined randomly, no special properties or parameters are applied.\n{Helpers.bcolors.RESET}") if enableRarity: - print(f"{bcolors.OK}Rarity is ON. Weights listed in .blend scene will be taken into account.\n{bcolors.RESET}") + print(f"{Helpers.bcolors.OK}Rarity is ON. Weights listed in .blend scene will be taken into account.\n{Helpers.bcolors.RESET}") if enableLogic: - print(f"{bcolors.OK}Logic is ON. {len(list(logicFile.keys()))} rules detected and applied.\n{bcolors.RESET}") + print(f"{Helpers.bcolors.OK}Logic is ON. 
{len(list(logicFile.keys()))} rules detected and applied.\n{Helpers.bcolors.RESET}") time_start = time.time() @@ -325,10 +255,10 @@ def send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, e except FileNotFoundError: raise FileNotFoundError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" + f"\n{Helpers.bcolors.ERROR}Blend_My_NFTs Error:\n" f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows " f"the naming conventions and scene structure. For more information, " - f"see:\n{bcolors.RESET}" + f"see:\n{Helpers.bcolors.RESET}" f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) finally: @@ -340,20 +270,20 @@ def send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, e outfile.write(ledger + '\n') print( - f"\n{bcolors.OK}Blend_My_NFTs Success:\n" - f"{len(DataDictionary['DNAList'])} NFT DNA saved to {NFTRecord_save_path}. NFT DNA Successfully created.\n{bcolors.RESET}") + f"\n{Helpers.bcolors.OK}Blend_My_NFTs Success:\n" + f"{len(DataDictionary['DNAList'])} NFT DNA saved to {NFTRecord_save_path}. NFT DNA Successfully created.\n{Helpers.bcolors.RESET}") except: raise ( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" + f"\n{Helpers.bcolors.ERROR}Blend_My_NFTs Error:\n" f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows " f"the naming conventions and scene structure. 
For more information, " - f"see:\n{bcolors.RESET}" + f"see:\n{Helpers.bcolors.RESET}" f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) # Loading Animation: - loading = Loader(f'Creating NFT DNA...', '').start() + loading = Helpers.Loader(f'Creating NFT DNA...', '').start() create_nft_data() makeBatches(collectionSize, nftsPerBatch, save_path, batch_json_save_path) loading.stop() @@ -361,5 +291,5 @@ def send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, e time_end = time.time() print( - f"{bcolors.OK}Created and saved NFT DNA in {time_end - time_start}s.\n{bcolors.RESET}" + f"{Helpers.bcolors.OK}Created and saved NFT DNA in {time_end - time_start}s.\n{Helpers.bcolors.RESET}" ) diff --git a/main/Exporter.py b/main/Exporter.py index cb91aa3..46587df 100644 --- a/main/Exporter.py +++ b/main/Exporter.py @@ -1,6 +1,6 @@ # Purpose: -# This file takes a given Batch created by DNA_Generator.py and tells blender to render the image or export a 3D model to -# the NFT_Output folder. +# This file takes a given Batch created by DNA_Generator.py and tells blender to render the image or export a 3D model +# to the NFT_Output folder. import bpy import os diff --git a/main/Helpers.py b/main/Helpers.py index b85fd81..89e229d 100644 --- a/main/Helpers.py +++ b/main/Helpers.py @@ -1,6 +1,7 @@ import bpy import os import json +import copy import platform from time import sleep from itertools import cycle @@ -8,9 +9,6 @@ from threading import Thread from shutil import get_terminal_size from collections import Counter, defaultdict -from . import DNA_Generator - - # ======== ENABLE DEBUG ======== # # This section is used for debugging, coding, or general testing purposes. @@ -81,6 +79,121 @@ def save_result(result): outfile.write(data + '\n') +# ======== GET COMBINATIONS ======== # + +# This section retrieves the Scene hierarchy from the current Blender file. 
+ + +def get_hierarchy(): + """ + Returns the hierarchy of a given Blender scene. + """ + + coll = bpy.context.scene.collection + + scriptIgnoreCollection = bpy.data.collections["Script_Ignore"] + + listAllCollInScene = [] + listAllCollections = [] + + def traverse_tree(t): + yield t + for child in t.children: + yield from traverse_tree(child) + + for c in traverse_tree(coll): + listAllCollInScene.append(c) + + for i in listAllCollInScene: + listAllCollections.append(i.name) + + listAllCollections.remove(scriptIgnoreCollection.name) + + if "Scene Collection" in listAllCollections: + listAllCollections.remove("Scene Collection") + + if "Master Collection" in listAllCollections: + listAllCollections.remove("Master Collection") + + def allScriptIgnore(scriptIgnoreCollection): + # Removes all collections, sub collections in Script_Ignore collection from listAllCollections. + + for coll in list(scriptIgnoreCollection.children): + listAllCollections.remove(coll.name) + listColl = list(coll.children) + if len(listColl) > 0: + allScriptIgnore(coll) + + allScriptIgnore(scriptIgnoreCollection) + listAllCollections.sort() + + exclude = ["_"] # Excluding characters that identify a Variant + attributeCollections = copy.deepcopy(listAllCollections) + + def filter_num(): + """ + This function removes items from 'attributeCollections' if they include values from the 'exclude' variable. + It removes child collections from the parent collections in from the "listAllCollections" list. 
+ """ + for x in attributeCollections: + if any(a in x for a in exclude): + attributeCollections.remove(x) + + for i in range(len(listAllCollections)): + filter_num() + + attributeVariants = [x for x in listAllCollections if x not in attributeCollections] + attributeCollections1 = copy.deepcopy(attributeCollections) + + def attributeData(attributeVariants): + """ + Creates a dictionary of each attribute + """ + allAttDataList = {} + for i in attributeVariants: + # Check if name follows naming conventions: + if int(i.count("_")) > 2 and int(i.split("_")[1]) > 0: + raise Exception( + f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" + f"There is a naming issue with the following Attribute/Variant: '{i}'\n" + f"Review the naming convention of Attribute and Variant collections here:\n{bcolors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + ) + + try: + number = i.split("_")[1] + name = i.split("_")[0] + rarity = i.split("_")[2] + except IndexError: + raise Exception( + f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" + f"There is a naming issue with the following Attribute/Variant: '{i}'\n" + f"Review the naming convention of Attribute and Variant collections here:\n{bcolors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + ) + + allAttDataList[i] = {"name": name, "number": number, "rarity": rarity} + return allAttDataList + + variantMetaData = attributeData(attributeVariants) + + hierarchy = {} + for i in attributeCollections1: + colParLong = list(bpy.data.collections[str(i)].children) + colParShort = {} + for x in colParLong: + colParShort[x.name] = None + hierarchy[i] = colParShort + + for a in hierarchy: + for b in hierarchy[a]: + for x in variantMetaData: + if str(x) == str(b): + (hierarchy[a])[b] = variantMetaData[x] + + return hierarchy + + # ======== GET COMBINATIONS ======== # # This section is used to get the number of combinations for checks and the UI display @@ -91,7 
+204,7 @@ def get_combinations(): combinations. """ - hierarchy = DNA_Generator.get_hierarchy() + hierarchy = get_hierarchy() hierarchyByNum = [] for i in hierarchy: @@ -140,7 +253,7 @@ def check_Scene(): # Not complete f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{bcolors.RESET}" ) - hierarchy = DNA_Generator.get_hierarchy() + hierarchy = get_hierarchy() collections = bpy.context.scene.collection # attribute_naming_conventions diff --git a/main/Logic.py b/main/Logic.py index 006b5bc..1271b58 100644 --- a/main/Logic.py +++ b/main/Logic.py @@ -194,7 +194,9 @@ def get_rule_break_type(hierarchy, deconstructed_DNA, if_dict, result_dict, resu def create_dicts(hierarchy, rule_list_items, result_dict_type): - # Example of output structure: + """ + Example of output structure: + structure = { "attribute1": { "variant1": [ @@ -229,6 +231,7 @@ def create_dicts(hierarchy, rule_list_items, result_dict_type): ] } } + """ items_returned = collections.defaultdict(dict) for a in rule_list_items: diff --git a/main/Rarity.py b/main/Rarity.py deleted file mode 100644 index b806f3f..0000000 --- a/main/Rarity.py +++ /dev/null @@ -1,50 +0,0 @@ -# Purpose: -# This file sorts the Variants in DNA slots based on the rarity value set in the name. - -import bpy -import random - -from .Helpers import bcolors, removeList, remove_file_by_extension - - -def createDNArarity(hierarchy): - """ - Sorts through DataDictionary and appropriately weights each variant based on their rarity percentage set in Blender - ("rarity" in DNA_Generator). 
Then - """ - singleDNA = "" - - for i in hierarchy: - number_List_Of_i = [] - rarity_List_Of_i = [] - ifZeroBool = None - - for k in hierarchy[i]: - number = hierarchy[i][k]["number"] - number_List_Of_i.append(number) - - rarity = hierarchy[i][k]["rarity"] - rarity_List_Of_i.append(float(rarity)) - - for x in rarity_List_Of_i: - if x == 0: - ifZeroBool = True - elif x != 0: - ifZeroBool = False - - try: - if ifZeroBool: - variantByNum = random.choices(number_List_Of_i, k=1) - elif not ifZeroBool: - variantByNum = random.choices(number_List_Of_i, weights=rarity_List_Of_i, k=1) - except IndexError: - raise IndexError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"An issue was found within the Attribute collection '{i}'. For more information on Blend_My_NFTs compatible scenes, " - f"see:\n{bcolors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" - ) - - singleDNA += "-" + str(variantByNum[0]) - singleDNA = ''.join(singleDNA.split('-', 1)) - return singleDNA \ No newline at end of file From e7caa49faf5769d5984df94d56d212600bf6145f Mon Sep 17 00:00:00 2001 From: Torrin Leonard <82110564+torrinworx@users.noreply.github.com> Date: Wed, 24 Aug 2022 10:00:53 -0400 Subject: [PATCH 04/12] Fixing Print statement spacing --- main/Exporter.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/main/Exporter.py b/main/Exporter.py index 46587df..be44dc7 100644 --- a/main/Exporter.py +++ b/main/Exporter.py @@ -263,13 +263,13 @@ def render_and_save_NFTs(input): # ob.data.body = str(f"DNA: {full_single_dna}") # Set text of Text Object ob print(f"\n{bcolors.OK}======== Generating NFT {x}/{NFTs_in_Batch}: {name} ========{bcolors.RESET}") - print(f"Variants selected:") + print(f"\nVariants selected:") print(f"{dnaDictionary}") if input.enableMaterials: - print(f"Materials selected:") + print(f"\nMaterials selected:") print(f"{materialdnaDictionary}") - print(f"DNA Code:{full_single_dna}") + print(f"\nDNA 
Code:{full_single_dna}") for c in dnaDictionary: collection = dnaDictionary[c] From 3a35a66e7529bad2d06e02c706c93caf5a1c1803 Mon Sep 17 00:00:00 2001 From: Torrin Leonard <82110564+torrinworx@users.noreply.github.com> Date: Wed, 24 Aug 2022 10:50:51 -0400 Subject: [PATCH 05/12] pep8 formatting Helpers file - Minor changes to print statements in console --- __init__.py | 4 +- main/DNA_Generator.py | 47 +++--- main/Exporter.py | 24 +-- main/Helpers.py | 331 +++++++++++++++++++------------------ main/Logic.py | 10 +- main/Material_Generator.py | 14 +- main/Refactorer.py | 2 +- 7 files changed, 220 insertions(+), 212 deletions(-) diff --git a/__init__.py b/__init__.py index 53eca7b..0e33683 100644 --- a/__init__.py +++ b/__init__.py @@ -545,7 +545,7 @@ class resume_failed_batch(bpy.types.Operator): file_name = os.path.join(_batch_json_save_path, "Batch{}.json".format(_batchToGenerate)) batchData = json.load(open(file_name)) - _fail_state, _failed_batch, _failed_dna, _failed_dna_index = Helpers.check_FailedBatches(_batch_json_save_path) + _fail_state, _failed_batch, _failed_dna, _failed_dna_index = Helpers.check_failed_batches(_batch_json_save_path) render_settings = batchData["Generation Save"][-1]["Render_Settings"] @@ -885,7 +885,7 @@ class BMNFTS_PT_GenerateNFTs(bpy.types.Panel): batch_json_save_path = os.path.join(Blend_My_NFTs_Output, "Batch_Data") nftBatch_save_path = os.path.join(save_path, "Blend_My_NFTs Output", "Generated NFT Batches") - fail_state, failed_batch, failed_dna, failed_dna_index = Helpers.check_FailedBatches(batch_json_save_path) + fail_state, failed_batch, failed_dna, failed_dna_index = Helpers.check_failed_batches(batch_json_save_path) if fail_state: row = layout.row() diff --git a/main/DNA_Generator.py b/main/DNA_Generator.py index 3d8fe3a..f1241ac 100644 --- a/main/DNA_Generator.py +++ b/main/DNA_Generator.py @@ -76,9 +76,9 @@ def generateNFT_DNA(collectionSize, enableRarity, enableLogic, logicFile, enable variantByNum = 
random.choices(number_List_Of_i, weights=rarity_List_Of_i, k=1) except IndexError: raise IndexError( - f"\n{Helpers.bcolors.ERROR}Blend_My_NFTs Error:\n" + f"\n{Helpers.TextColors.ERROR}Blend_My_NFTs Error:\n" f"An issue was found within the Attribute collection '{i}'. For more information on Blend_My_NFTs compatible scenes, " - f"see:\n{Helpers.bcolors.RESET}" + f"see:\n{Helpers.TextColors.RESET}" f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) @@ -141,7 +141,7 @@ def generateNFT_DNA(collectionSize, enableRarity, enableLogic, logicFile, enable # Messages: - Helpers.raise_Warning_collectionSize(DNAList, collectionSize) + Helpers.raise_warning_collection_size(DNAList, collectionSize) # Data stored in batchDataDictionary: DataDictionary["numNFTsGenerated"] = len(DNAList) @@ -217,23 +217,30 @@ def send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, e """ # Checking Scene is compatible with BMNFTs: - Helpers.check_Scene() + Helpers.check_scene() # Messages: print( - f"\n========================================\n" - f"Creating NFT Data. Generating {collectionSize} NFT DNA.\n" + f"\n{Helpers.TextColors.OK}======== Creating NFT Data ========{Helpers.TextColors.RESET}" + f"\nGenerating {collectionSize} NFT DNA" ) if not enableRarity and not enableLogic: print( - f"{Helpers.bcolors.OK}NFT DNA will be determined randomly, no special properties or parameters are applied.\n{Helpers.bcolors.RESET}") + f"{Helpers.TextColors.OK}NFT DNA will be determined randomly, no special properties or parameters are " + f"applied.\n{Helpers.TextColors.RESET}") if enableRarity: - print(f"{Helpers.bcolors.OK}Rarity is ON. Weights listed in .blend scene will be taken into account.\n{Helpers.bcolors.RESET}") + print( + f"{Helpers.TextColors.OK}Rarity is ON. Weights listed in .blend scene will be taken into account." + f"{Helpers.TextColors.RESET}" + ) if enableLogic: - print(f"{Helpers.bcolors.OK}Logic is ON. 
{len(list(logicFile.keys()))} rules detected and applied.\n{Helpers.bcolors.RESET}") + print( + f"{Helpers.TextColors.OK}Logic is ON. {len(list(logicFile.keys()))} rules detected and applied." + f"{Helpers.TextColors.RESET}" + ) time_start = time.time() @@ -245,20 +252,20 @@ def send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, e # Checks: - Helpers.raise_Warning_maxNFTs(nftsPerBatch, collectionSize) - Helpers.check_Duplicates(DataDictionary["DNAList"]) - Helpers.raise_Error_ZeroCombinations() + Helpers.raise_warning_max_nfts(nftsPerBatch, collectionSize) + Helpers.check_duplicates(DataDictionary["DNAList"]) + Helpers.raise_error_zero_combinations() if enableRarity: - Helpers.check_Rarity(DataDictionary["hierarchy"], DataDictionary["DNAList"], + Helpers.check_rarity(DataDictionary["hierarchy"], DataDictionary["DNAList"], os.path.join(save_path, "Blend_My_NFTs Output/NFT_Data")) except FileNotFoundError: raise FileNotFoundError( - f"\n{Helpers.bcolors.ERROR}Blend_My_NFTs Error:\n" + f"\n{Helpers.TextColors.ERROR}Blend_My_NFTs Error:\n" f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows " f"the naming conventions and scene structure. For more information, " - f"see:\n{Helpers.bcolors.RESET}" + f"see:\n{Helpers.TextColors.RESET}" f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) finally: @@ -270,15 +277,15 @@ def send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, e outfile.write(ledger + '\n') print( - f"\n{Helpers.bcolors.OK}Blend_My_NFTs Success:\n" - f"{len(DataDictionary['DNAList'])} NFT DNA saved to {NFTRecord_save_path}. NFT DNA Successfully created.\n{Helpers.bcolors.RESET}") + f"\n{Helpers.TextColors.OK}Blend_My_NFTs Success:\n" + f"{len(DataDictionary['DNAList'])} NFT DNA saved to {NFTRecord_save_path}. 
NFT DNA Successfully created.\n{Helpers.TextColors.RESET}") except: raise ( - f"\n{Helpers.bcolors.ERROR}Blend_My_NFTs Error:\n" + f"\n{Helpers.TextColors.ERROR}Blend_My_NFTs Error:\n" f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows " f"the naming conventions and scene structure. For more information, " - f"see:\n{Helpers.bcolors.RESET}" + f"see:\n{Helpers.TextColors.RESET}" f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) @@ -291,5 +298,5 @@ def send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, e time_end = time.time() print( - f"{Helpers.bcolors.OK}Created and saved NFT DNA in {time_end - time_start}s.\n{Helpers.bcolors.RESET}" + f"{Helpers.TextColors.OK}Created and saved NFT DNA in {time_end - time_start}s.\n{Helpers.TextColors.RESET}" ) diff --git a/main/Exporter.py b/main/Exporter.py index be44dc7..283d70c 100644 --- a/main/Exporter.py +++ b/main/Exporter.py @@ -11,7 +11,7 @@ import smtplib import datetime import platform -from .Helpers import bcolors, Loader +from .Helpers import TextColors, Loader from .Metadata import createCardanoMetadata, createSolanaMetaData, createErc721MetaData @@ -137,7 +137,7 @@ def render_and_save_NFTs(input): # If failed Batch is detected and user is resuming its generation: if input.fail_state: - print(f"{bcolors.ERROR}\nResuming Batch #{input.failed_batch}\n{bcolors.RESET}") + print(f"{TextColors.ERROR}\nResuming Batch #{input.failed_batch}\n{TextColors.RESET}") NFTs_in_Batch, hierarchy, BatchDNAList = getBatchData(input.failed_batch, input.batch_json_save_path) for a in range(input.failed_dna): del BatchDNAList[0] @@ -247,10 +247,10 @@ def render_and_save_NFTs(input): bpy.data.collections[j].hide_viewport = True except KeyError: raise TypeError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" f"The Collection '{j}' appears to be missing or has been renamed. 
If you made any changes to " f"your .blned file scene, ensure you re-create your NFT Data so Blend_My_NFTs can read your scene." - f"For more information see:{bcolors.RESET}" + f"For more information see:{TextColors.RESET}" f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) @@ -262,7 +262,7 @@ def render_and_save_NFTs(input): # ob = bpy.data.objects['Text'] # Object name # ob.data.body = str(f"DNA: {full_single_dna}") # Set text of Text Object ob - print(f"\n{bcolors.OK}======== Generating NFT {x}/{NFTs_in_Batch}: {name} ========{bcolors.RESET}") + print(f"\n{TextColors.OK}======== Generating NFT {x}/{NFTs_in_Batch}: {name} ========{TextColors.RESET}") print(f"\nVariants selected:") print(f"{dnaDictionary}") if input.enableMaterials: @@ -279,7 +279,7 @@ def render_and_save_NFTs(input): time_start_2 = time.time() - # Main paths for batch subfolders: + # Main paths for batch sub-folders: batchFolder = os.path.join(input.nftBatch_save_path, "Batch" + str(input.batchToGenerate)) imageFolder = os.path.join(batchFolder, "Images") @@ -307,7 +307,7 @@ def render_and_save_NFTs(input): # Generation/Rendering: if input.enableImages: - print(f"{bcolors.OK}---Image---{bcolors.RESET}") + print(f"{TextColors.OK}---Image---{TextColors.RESET}") image_render_time_start = time.time() @@ -329,11 +329,11 @@ def render_and_save_NFTs(input): image_render_time_end = time.time() print( - f"{bcolors.OK}Rendered image in {image_render_time_end - image_render_time_start}s.\n{bcolors.RESET}" + f"{TextColors.OK}Rendered image in {image_render_time_end - image_render_time_start}s.\n{TextColors.RESET}" ) if input.enableAnimations: - print(f"{bcolors.OK}---Animation---{bcolors.RESET}") + print(f"{TextColors.OK}---Animation---{TextColors.RESET}") animation_render_time_start = time.time() @@ -380,11 +380,11 @@ def render_and_save_NFTs(input): animation_render_time_end = time.time() print( - f"{bcolors.OK}Rendered animation in {animation_render_time_end - 
animation_render_time_start}s.\n{bcolors.RESET}" + f"{TextColors.OK}Rendered animation in {animation_render_time_end - animation_render_time_start}s.\n{TextColors.RESET}" ) if input.enableModelsBlender: - print(f"{bcolors.OK}---3D Model---{bcolors.RESET}") + print(f"{TextColors.OK}---3D Model---{TextColors.RESET}") model_generation_time_start = time.time() @@ -463,7 +463,7 @@ def render_and_save_NFTs(input): model_generation_time_end = time.time() print( - f"{bcolors.OK}Generated 3D model in {model_generation_time_end - model_generation_time_start}s.\n{bcolors.RESET}" + f"{TextColors.OK}Generated 3D model in {model_generation_time_end - model_generation_time_start}s.\n{TextColors.RESET}" ) # Generating Metadata: diff --git a/main/Helpers.py b/main/Helpers.py index 89e229d..e606bca 100644 --- a/main/Helpers.py +++ b/main/Helpers.py @@ -9,6 +9,7 @@ from threading import Thread from shutil import get_terminal_size from collections import Counter, defaultdict + # ======== ENABLE DEBUG ======== # # This section is used for debugging, coding, or general testing purposes. @@ -19,10 +20,10 @@ def enable_debug(enable_debug_bool): import logging logging.basicConfig( - filename="./log.txt", - level=logging.DEBUG, - format='[%(levelname)s][%(asctime)s]\n%(message)s\n', - datefmt='%Y-%m-%d %H:%M:%S' + filename="./log.txt", + level=logging.DEBUG, + format='[%(levelname)s][%(asctime)s]\n%(message)s\n', + datefmt='%Y-%m-%d %H:%M:%S' ) @@ -36,9 +37,9 @@ removeList = [".gitignore", ".DS_Store", "desktop.ini", ".ini"] def remove_file_by_extension(dirlist): """ - Checks if a given directory list contains any of the files or file extensions listed above, if so, remove them from - list and return a clean dir list. These files interfer with BMNFTs operations and should be removed whenever dealing - with directories. + Checks if a given directory list contains any of the files or file extensions listed above, if so, remove them + from list and return a clean dir list. 
These files interfere with BMNFTs operations and should be removed + whenever dealing with directories. """ if str(type(dirlist)) == "": @@ -52,7 +53,7 @@ def remove_file_by_extension(dirlist): return return_dirs -class bcolors: +class TextColors: """ The colour of console messages. """ @@ -91,10 +92,10 @@ def get_hierarchy(): coll = bpy.context.scene.collection - scriptIgnoreCollection = bpy.data.collections["Script_Ignore"] + script_ignore_collection = bpy.data.collections["Script_Ignore"] - listAllCollInScene = [] - listAllCollections = [] + list_all_coll_in_scene = [] + list_all_collections = [] def traverse_tree(t): yield t @@ -102,62 +103,62 @@ def get_hierarchy(): yield from traverse_tree(child) for c in traverse_tree(coll): - listAllCollInScene.append(c) + list_all_coll_in_scene.append(c) - for i in listAllCollInScene: - listAllCollections.append(i.name) + for i in list_all_coll_in_scene: + list_all_collections.append(i.name) - listAllCollections.remove(scriptIgnoreCollection.name) + list_all_collections.remove(script_ignore_collection.name) - if "Scene Collection" in listAllCollections: - listAllCollections.remove("Scene Collection") + if "Scene Collection" in list_all_collections: + list_all_collections.remove("Scene Collection") - if "Master Collection" in listAllCollections: - listAllCollections.remove("Master Collection") + if "Master Collection" in list_all_collections: + list_all_collections.remove("Master Collection") - def allScriptIgnore(scriptIgnoreCollection): - # Removes all collections, sub collections in Script_Ignore collection from listAllCollections. + def all_script_ignore(script_ignore_coll): + # Removes all collections, sub collections in Script_Ignore collection from list_all_collections. 
- for coll in list(scriptIgnoreCollection.children): - listAllCollections.remove(coll.name) - listColl = list(coll.children) - if len(listColl) > 0: - allScriptIgnore(coll) + for collection in list(script_ignore_coll.children): + list_all_collections.remove(collection.name) + list_coll = list(collection.children) + if len(list_coll) > 0: + all_script_ignore(collection) - allScriptIgnore(scriptIgnoreCollection) - listAllCollections.sort() + all_script_ignore(script_ignore_collection) + list_all_collections.sort() exclude = ["_"] # Excluding characters that identify a Variant - attributeCollections = copy.deepcopy(listAllCollections) + attribute_collections = copy.deepcopy(list_all_collections) def filter_num(): """ - This function removes items from 'attributeCollections' if they include values from the 'exclude' variable. - It removes child collections from the parent collections in from the "listAllCollections" list. + This function removes items from 'attribute_collections' if they include values from the 'exclude' variable. + It removes child collections from the parent collections in from the "list_all_collections" list. 
""" - for x in attributeCollections: - if any(a in x for a in exclude): - attributeCollections.remove(x) + for x in attribute_collections: + if any(i in x for i in exclude): + attribute_collections.remove(x) - for i in range(len(listAllCollections)): + for i in range(len(list_all_collections)): filter_num() - attributeVariants = [x for x in listAllCollections if x not in attributeCollections] - attributeCollections1 = copy.deepcopy(attributeCollections) + attribute_variants = [x for x in list_all_collections if x not in attribute_collections] + attribute_collections1 = copy.deepcopy(attribute_collections) - def attributeData(attributeVariants): + def attribute_data(att_vars): """ - Creates a dictionary of each attribute - """ - allAttDataList = {} - for i in attributeVariants: + Creates a dictionary of each attribute + """ + all_att_data_list = {} + for i in att_vars: # Check if name follows naming conventions: if int(i.count("_")) > 2 and int(i.split("_")[1]) > 0: raise Exception( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"There is a naming issue with the following Attribute/Variant: '{i}'\n" - f"Review the naming convention of Attribute and Variant collections here:\n{bcolors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"There is a naming issue with the following Attribute/Variant: '{i}'\n" + f"Review the naming convention of Attribute and Variant collections here:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) try: @@ -166,30 +167,30 @@ def get_hierarchy(): rarity = i.split("_")[2] except IndexError: raise Exception( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"There is a naming issue with the following Attribute/Variant: '{i}'\n" - f"Review the naming convention of Attribute and Variant collections here:\n{bcolors.RESET}" - 
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"There is a naming issue with the following Attribute/Variant: '{i}'\n" + f"Review the naming convention of Attribute and Variant collections here:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) - allAttDataList[i] = {"name": name, "number": number, "rarity": rarity} - return allAttDataList + all_att_data_list[i] = {"name": name, "number": number, "rarity": rarity} + return all_att_data_list - variantMetaData = attributeData(attributeVariants) + variant_meta_data = attribute_data(attribute_variants) hierarchy = {} - for i in attributeCollections1: - colParLong = list(bpy.data.collections[str(i)].children) - colParShort = {} - for x in colParLong: - colParShort[x.name] = None - hierarchy[i] = colParShort + for i in attribute_collections1: + col_par_long = list(bpy.data.collections[str(i)].children) + col_par_short = {} + for x in col_par_long: + col_par_short[x.name] = None + hierarchy[i] = col_par_short for a in hierarchy: for b in hierarchy[a]: - for x in variantMetaData: - if str(x) == str(b): - (hierarchy[a])[b] = variantMetaData[x] + for x in variant_meta_data: + if str(x) == str(b): + (hierarchy[a])[b] = variant_meta_data[x] return hierarchy @@ -205,17 +206,17 @@ def get_combinations(): """ hierarchy = get_hierarchy() - hierarchyByNum = [] + hierarchy_by_num = [] for i in hierarchy: # Ignore Collections with nothing in them - if len(hierarchy[i]) != 0: - hierarchyByNum.append(len(hierarchy[i])) + if len(hierarchy[i]) != 0: + hierarchy_by_num.append(len(hierarchy[i])) else: print(f"The following collection has been identified as empty: {i}") combinations = 1 - for i in hierarchyByNum: + for i in hierarchy_by_num: combinations = combinations * i return combinations @@ -230,7 +231,7 @@ def get_combinations(): # This section is provided for transparency. 
The accuracy of the rarity values you set in your .blend file as outlined # in the README.md file are dependent on the maxNFTs, and the maximum number of combinations of your NFT collection. -def check_Scene(): # Not complete +def check_scene(): # Not complete """ Checks if Blender file Scene follows the Blend_My_NFTs conventions. If not, raises error with all instances of violations. @@ -247,10 +248,11 @@ def check_Scene(): # Not complete script_ignore_exists = True except KeyError: raise TypeError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"Add a Script_Ignore collection to your Blender scene and ensure the name is exactly 'Script_Ignore'. For more information, " - f"see:" - f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{bcolors.RESET}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"Add a Script_Ignore collection to your Blender scene and ensure the name is exactly 'Script_Ignore'. " + f"For more information, " + f"see:" + f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{TextColors.RESET}" ) hierarchy = get_hierarchy() @@ -259,84 +261,77 @@ def check_Scene(): # Not complete # attribute_naming_conventions -def check_Rarity(hierarchy, DNAListFormatted, save_path): +def check_rarity(hierarchy, dna_list_formatted, save_path): """Checks rarity percentage of each Variant, then sends it to RarityData.json in NFT_Data folder.""" - DNAList = [] - for i in DNAListFormatted: - DNAList.append(list(i.keys())[0]) - - numNFTsGenerated = len(DNAList) - - numDict = defaultdict(list) - + dna_list = [list(i.keys())[0] for i in dna_list_formatted] + num_nfts_generated = len(dna_list) + num_dict = defaultdict(list) hierarchy.keys() - for i in DNAList: - dnaSplitList = i.split("-") + for i in dna_list: + dna_split_list = i.split("-") - for j, k in zip(dnaSplitList, hierarchy.keys()): - numDict[k].append(j) + for j, k in zip(dna_split_list, hierarchy.keys()): + num_dict[k].append(j) - numDict 
= dict(numDict) + num_dict = dict(num_dict) - for i in numDict: - count = dict(Counter(numDict[i])) - numDict[i] = count + for i in num_dict: + count = dict(Counter(num_dict[i])) + num_dict[i] = count - fullNumName = {} + full_num_name = {} for i in hierarchy: - fullNumName[i] = {} + full_num_name[i] = {} for j in hierarchy[i]: - variantNum = hierarchy[i][j]["number"] + variant_num = hierarchy[i][j]["number"] - fullNumName[i][variantNum] = j + full_num_name[i][variant_num] = j - completeData = {} + complete_data = {} - for i, j in zip(fullNumName, numDict): + for i, j in zip(full_num_name, num_dict): x = {} - - for k in fullNumName[i]: - - for l in numDict[j]: + for k in full_num_name[i]: + for l in num_dict[j]: if l == k: - name = fullNumName[i][k] - num = numDict[j][l] - x[name] = [(str(round(((num / numNFTsGenerated) * 100), 2)) + "%"), str(num)] + name = full_num_name[i][k] + num = num_dict[j][l] + x[name] = [(str(round(((num / num_nfts_generated) * 100), 2)) + "%"), str(num)] - completeData[i] = x + complete_data[i] = x print( - f"\n{bcolors.OK}\n" - f"Rarity Checker is active. These are the percentages for each variant per attribute you set in your .blend file:" - f"\n{bcolors.RESET}" + f"\n{TextColors.OK}\n" + f"Rarity Checker is active. 
These are the percentages for each variant per attribute you set in your .blend" + f" file: \n{TextColors.RESET}" ) - for i in completeData: + for i in complete_data: print(i + ":") - for j in completeData[i]: - print(" " + j + ": " + completeData[i][j][0] + " Occurrences: " + completeData[i][j][1]) + for j in complete_data[i]: + print(" " + j + ": " + complete_data[i][j][0] + " Occurrences: " + complete_data[i][j][1]) - jsonMetaData = json.dumps(completeData, indent=1, ensure_ascii=True) + json_meta_data = json.dumps(complete_data, indent=1, ensure_ascii=True) with open(os.path.join(save_path, "RarityData.json"), 'w') as outfile: - outfile.write(jsonMetaData + '\n') + outfile.write(json_meta_data + '\n') path = os.path.join(save_path, "RarityData.json") - print(bcolors.OK + f"Rarity Data has been saved to {path}." + bcolors.RESET) + print(TextColors.OK + f"Rarity Data has been saved to {path}." + TextColors.RESET) -def check_Duplicates(DNAListFormatted): - """Checks if there are duplicates in DNAList before NFTRecord.json is sent to JSON file.""" - DNAList = [] - for i in DNAListFormatted: - DNAList.append(list(i.keys())[0]) +def check_duplicates(dna_list_formatted): + """Checks if there are duplicates in dna_list before NFTRecord.json is sent to JSON file.""" + dna_list = [] + for i in dna_list_formatted: + dna_list.append(list(i.keys())[0]) duplicates = 0 seen = set() - for x in DNAList: + for x in dna_list: if x in seen: print(x) duplicates += 1 @@ -345,7 +340,7 @@ def check_Duplicates(DNAListFormatted): print(f"\nNFTRecord.json contains {duplicates} duplicate NFT DNA.") -def check_FailedBatches(batch_json_save_path): +def check_failed_batches(batch_json_save_path): fail_state = False failed_batch = None failed_dna = None @@ -356,10 +351,10 @@ def check_FailedBatches(batch_json_save_path): for i in batch_folders: batch = json.load(open(os.path.join(batch_json_save_path, i))) - NFTs_in_Batch = batch["NFTs_in_Batch"] + nfts_in_batch = batch["NFTs_in_Batch"] if 
"Generation Save" in batch: dna_generated = batch["Generation Save"][-1]["DNA Generated"] - if dna_generated is not None and dna_generated < NFTs_in_Batch: + if dna_generated is not None and dna_generated < nfts_in_batch: fail_state = True failed_batch = int(i.removeprefix("Batch").removesuffix(".json")) failed_dna = dna_generated @@ -368,74 +363,80 @@ def check_FailedBatches(batch_json_save_path): # Raise Errors: -def raise_Error_numBatches(maxNFTs, nftsPerBatch): +def raise_error_num_batches(max_nfts, nfts_per_batch): """Checks if number of Batches is less than maxNFTs, if not raises error.""" try: - numBatches = maxNFTs / nftsPerBatch - return numBatches + num_batches = max_nfts / nfts_per_batch + return num_batches except ZeroDivisionError: raise ZeroDivisionError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"The number of NFTs per Batch must be greater than ZERO." - f"Please review your Blender scene and ensure it follows " - f"the naming conventions and scene structure. For more information, " - f"see:\n{bcolors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{bcolors.RESET}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"The number of NFTs per Batch must be greater than ZERO." + f"Please review your Blender scene and ensure it follows " + f"the naming conventions and scene structure. For more information, " + f"see:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure" + f"\n{TextColors.RESET}" ) -def raise_Error_ZeroCombinations(): +def raise_error_zero_combinations(): """Checks if combinations is greater than 0, if so, raises error.""" if get_combinations() == 0: raise ValueError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"The number of all possible combinations is ZERO. Please review your Blender scene and ensure it follows " - f"the naming conventions and scene structure. 
For more information, " - f"see:\n{bcolors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{bcolors.RESET}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"The number of all possible combinations is ZERO. Please review your Blender scene and ensure it " + f"follows the naming conventions and scene structure. For more information, see:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure" + f"\n{TextColors.RESET}" ) -def raise_Error_numBatchesGreaterThan(numBatches): - if numBatches < 1: +def raise_error_num_batches_greater_then(num_batches): + if num_batches < 1: raise ValueError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"The number of Batches is less than 1. Please review your Blender scene and ensure it follows " - f"the naming conventions and scene structure. For more information, " - f"see:\n{bcolors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{bcolors.RESET}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"The number of Batches is less than 1. Please review your Blender scene and ensure it follows " + f"the naming conventions and scene structure. For more information, " + f"see:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure" + f"\n{TextColors.RESET}" ) # Raise Warnings: -def raise_Warning_maxNFTs(nftsPerBatch, collectionSize): +def raise_warning_max_nfts(nfts_per_batch, collection_size): """ Prints warning if nftsPerBatch is greater than collectionSize. 
""" - if nftsPerBatch > collectionSize: + if nfts_per_batch > collection_size: raise ValueError( - f"\n{bcolors.WARNING}Blend_My_NFTs Warning:\n" - f"The number of NFTs Per Batch you set is smaller than the NFT Collection Size you set.\n{bcolors.RESET}" + f"\n{TextColors.WARNING}Blend_My_NFTs Warning:\n" + f"The number of NFTs Per Batch you set is smaller than the NFT Collection Size you set." + f"\n{TextColors.RESET}" ) -def raise_Warning_collectionSize(DNAList, collectionSize): +def raise_warning_collection_size(dna_list, collection_size): """ Prints warning if BMNFTs cannot generate requested number of NFTs from a given collectionSize. """ - if len(DNAList) < collectionSize: - print(f"\n{bcolors.WARNING} \nWARNING: \n" - f"Blend_My_NFTs cannot generate {collectionSize} NFTs." - f" Only {len(DNAList)} NFT DNA were generated." + if len(dna_list) < collection_size: + print(f"\n{TextColors.WARNING} \nWARNING: \n" + f"Blend_My_NFTs cannot generate {collection_size} NFTs." + f" Only {len(dna_list)} NFT DNA were generated." f"\nThis might be for a number of reasons:" - f"\n a) Rarity is preventing combinations from being generated (See https://github.com/torrinworx/Blend_My_NFTs#notes-on-rarity-and-weighted-variants).\n" - f"\n b) Logic is preventing combinations from being generated (See https://github.com/torrinworx/Blend_My_NFTs#logic).\n" - f"\n c) The number of possible combinations of your NFT collection is too low. Add more Variants or Attributes to increase the recommended collection size.\n" - f"\n{bcolors.RESET}") + f"\n a) Rarity is preventing combinations from being generated (See " + f"https://github.com/torrinworx/Blend_My_NFTs#notes-on-rarity-and-weighted-variants).\n " + f"\n b) Logic is preventing combinations from being generated (See " + f"https://github.com/torrinworx/Blend_My_NFTs#logic).\n " + f"\n c) The number of possible combinations of your NFT collection is too low. 
Add more Variants or " + f"Attributes to increase the recommended collection size.\n " + f"\n{TextColors.RESET}") # ======== LOADING ANIMATION ======== # @@ -458,16 +459,16 @@ class Loader: self._thread = Thread(target=self._animate, daemon=True) self.steps = [ - " [== ]", - " [ == ]", - " [ == ]", - " [ == ]", - " [ == ]", - " [ ==]", - " [ == ]", - " [ == ]", - " [ == ]", - " [ == ]", + " [== ]", + " [ == ]", + " [ == ]", + " [ == ]", + " [ == ]", + " [ ==]", + " [ == ]", + " [ == ]", + " [ == ]", + " [ == ]", ] self.done = False diff --git a/main/Logic.py b/main/Logic.py index 1271b58..2cc316d 100644 --- a/main/Logic.py +++ b/main/Logic.py @@ -5,7 +5,7 @@ import bpy import random import collections -from .Helpers import bcolors, removeList, remove_file_by_extension, save_result +from .Helpers import TextColors, removeList, remove_file_by_extension, save_result def reconstructDNA(deconstructedDNA): @@ -117,9 +117,9 @@ def apply_rules_to_dna(hierarchy, deconstructed_DNA, if_dict, result_dict, resul variantNum = random.choices(number_List_Of_i, weights=rarity_List_Of_i, k=1) except IndexError: raise IndexError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" f"An issue was found within the Attribute collection '{a}'. For more information on Blend_My_NFTs compatible scenes, " - f"see:\n{bcolors.RESET}" + f"see:\n{TextColors.RESET}" f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) else: @@ -127,9 +127,9 @@ def apply_rules_to_dna(hierarchy, deconstructed_DNA, if_dict, result_dict, resul variantNum = random.choices(number_List_Of_i, k=1) except IndexError: raise IndexError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" f"An issue was found within the Attribute collection '{a}'. 
For more information on Blend_My_NFTs compatible scenes, " - f"see:\n{bcolors.RESET}" + f"see:\n{TextColors.RESET}" f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) deconstructed_DNA[int(attribute_index)] = str(variantNum[0]) diff --git a/main/Material_Generator.py b/main/Material_Generator.py index 0218838..8c52f35 100644 --- a/main/Material_Generator.py +++ b/main/Material_Generator.py @@ -7,7 +7,7 @@ import bpy import json import random -from .Helpers import bcolors, removeList, remove_file_by_extension, save_result +from .Helpers import TextColors def select_material(materialList, variant, enableRarity): @@ -25,8 +25,8 @@ def select_material(materialList, variant, enableRarity): material_rarity_percent = materialList[material] rarity_List_Of_i.append(float(material_rarity_percent)) - print(f"MATERIAL_LIST_OF_I:{material_List_Of_i}") - print(f"RARITY_LIST_OF_I:{rarity_List_Of_i}") + # print(f"MATERIAL_LIST_OF_I:{material_List_Of_i}") + # print(f"RARITY_LIST_OF_I:{rarity_List_Of_i}") for b in rarity_List_Of_i: if b == 0: @@ -42,9 +42,9 @@ def select_material(materialList, variant, enableRarity): selected_material = random.choices(material_List_Of_i, weights=rarity_List_Of_i, k=1) except IndexError: raise IndexError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" f"An issue was found within the Material List of the Variant collection '{variant}'. 
For more information on Blend_My_NFTs compatible scenes, " - f"see:\n{bcolors.RESET}" + f"see:\n{TextColors.RESET}" f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) else: @@ -52,9 +52,9 @@ def select_material(materialList, variant, enableRarity): selected_material = random.choices(material_List_Of_i, k=1) except IndexError: raise IndexError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" f"An issue was found within the Material List of the Variant collection '{variant}'. For more information on Blend_My_NFTs compatible scenes, " - f"see:\n{bcolors.RESET}" + f"see:\n{TextColors.RESET}" f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) diff --git a/main/Refactorer.py b/main/Refactorer.py index 98eb970..0faa07f 100644 --- a/main/Refactorer.py +++ b/main/Refactorer.py @@ -6,7 +6,7 @@ import os import json import shutil -from .Helpers import bcolors, removeList, remove_file_by_extension +from .Helpers import TextColors, removeList, remove_file_by_extension def reformatNFTCollection(refactor_panel_input): From 3715b30afa409a8c35de9300f08ad348c75ccadb Mon Sep 17 00:00:00 2001 From: Torrin Leonard <82110564+torrinworx@users.noreply.github.com> Date: Wed, 24 Aug 2022 11:10:22 -0400 Subject: [PATCH 06/12] pep8 formatting DNA_Generator.py --- main/DNA_Generator.py | 300 +++++++++++++++++++++++------------------- main/Intermediate.py | 2 +- 2 files changed, 168 insertions(+), 134 deletions(-) diff --git a/main/DNA_Generator.py b/main/DNA_Generator.py index f1241ac..6d62d87 100644 --- a/main/DNA_Generator.py +++ b/main/DNA_Generator.py @@ -1,219 +1,246 @@ # Purpose: # This file generates NFT DNA based on a .blend file scene structure and exports NFTRecord.json. -import bpy import os import time import json import random +import traceback from functools import partial from . 
import Logic, Material_Generator, Helpers +from .Helpers import TextColors -def generateNFT_DNA(collectionSize, enableRarity, enableLogic, logicFile, enableMaterials, materialsFile, enable_debug): +def generate_nft_dna( + collection_size, + enable_rarity, + enable_logic, + logic_file, + enable_materials, + materials_file, + enable_debug +): """ - Returns batchDataDictionary containing the number of NFT combinations, hierarchy, and the DNAList. + Returns batchDataDictionary containing the number of NFT combinations, hierarchy, and the dna_list. """ hierarchy = Helpers.get_hierarchy() # DNA random, Rarity and Logic methods: - DataDictionary = {} + data_dictionary = {} - def createDNArandom(hierarchy): + def create_dna_random(hierarchy): """Creates a single DNA randomly without Rarity or Logic.""" - dnaStr = "" - dnaStrList = [] - listOptionVariant = [] + dna_str = "" + dna_str_list = [] + list_option_variant = [] for i in hierarchy: - numChild = len(hierarchy[i]) - possibleNums = list(range(1, numChild + 1)) - listOptionVariant.append(possibleNums) + num_child = len(hierarchy[i]) + possible_nums = list(range(1, num_child + 1)) + list_option_variant.append(possible_nums) - for i in listOptionVariant: - randomVariantNum = random.choices(i, k=1) - str1 = ''.join(str(e) for e in randomVariantNum) - dnaStrList.append(str1) + for i in list_option_variant: + random_variant_num = random.choices(i, k=1) + str1 = ''.join(str(e) for e in random_variant_num) + dna_str_list.append(str1) - for i in dnaStrList: + for i in dna_str_list: num = "-" + str(i) - dnaStr += num + dna_str += num - dna = ''.join(dnaStr.split('-', 1)) + dna = ''.join(dna_str.split('-', 1)) return str(dna) - def createDNArarity(hierarchy): + def create_dna_rarity(hierarchy): """ - Sorts through DataDictionary and appropriately weights each variant based on their rarity percentage set in Blender + Sorts through data_dictionary and appropriately weights each variant based on their rarity percentage set in 
Blender ("rarity" in DNA_Generator). Then """ - singleDNA = "" + single_dna = "" for i in hierarchy: - number_List_Of_i = [] - rarity_List_Of_i = [] - ifZeroBool = None + number_list_of_i = [] + rarity_list_of_i = [] + if_zero_bool = None for k in hierarchy[i]: number = hierarchy[i][k]["number"] - number_List_Of_i.append(number) + number_list_of_i.append(number) rarity = hierarchy[i][k]["rarity"] - rarity_List_Of_i.append(float(rarity)) + rarity_list_of_i.append(float(rarity)) - for x in rarity_List_Of_i: + for x in rarity_list_of_i: if x == 0: - ifZeroBool = True + if_zero_bool = True elif x != 0: - ifZeroBool = False + if_zero_bool = False try: - if ifZeroBool: - variantByNum = random.choices(number_List_Of_i, k=1) - elif not ifZeroBool: - variantByNum = random.choices(number_List_Of_i, weights=rarity_List_Of_i, k=1) + if if_zero_bool: + variant_by_num = random.choices(number_list_of_i, k=1) + elif not if_zero_bool: + variant_by_num = random.choices(number_list_of_i, weights=rarity_list_of_i, k=1) except IndexError: raise IndexError( - f"\n{Helpers.TextColors.ERROR}Blend_My_NFTs Error:\n" - f"An issue was found within the Attribute collection '{i}'. For more information on Blend_My_NFTs compatible scenes, " - f"see:\n{Helpers.TextColors.RESET}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"An issue was found within the Attribute collection '{i}'. 
For more information on Blend_My_NFTs " + f"compatible scenes, see:\n{TextColors.RESET}" f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) - singleDNA += "-" + str(variantByNum[0]) - singleDNA = ''.join(singleDNA.split('-', 1)) - return singleDNA + single_dna += "-" + str(variant_by_num[0]) + single_dna = ''.join(single_dna.split('-', 1)) + return single_dna def singleCompleteDNA(): """ This function applies Rarity and Logic to a single DNA created by createDNASingle() if Rarity or Logic specified """ - singleDNA = "" - if not enableRarity: - singleDNA = createDNArandom(hierarchy) + single_dna = "" + if not enable_rarity: + single_dna = create_dna_random(hierarchy) # print("============") - # print(f"Original DNA: {singleDNA}") - if enableRarity: - singleDNA = createDNArarity(hierarchy) - # print(f"Rarity DNA: {singleDNA}") + # print(f"Original DNA: {single_dna}") + if enable_rarity: + single_dna = create_dna_rarity(hierarchy) + # print(f"Rarity DNA: {single_dna}") - if enableLogic: - singleDNA = Logic.logicafyDNAsingle(hierarchy, singleDNA, logicFile, enableRarity, enableMaterials) - # print(f"Logic DNA: {singleDNA}") + if enable_logic: + single_dna = Logic.logicafyDNAsingle(hierarchy, single_dna, logic_file, enable_rarity, enable_materials) + # print(f"Logic DNA: {single_dna}") - if enableMaterials: - singleDNA = Material_Generator.apply_materials(hierarchy, singleDNA, materialsFile, enableRarity) - # print(f"Materials DNA: {singleDNA}") + if enable_materials: + single_dna = Material_Generator.apply_materials(hierarchy, single_dna, materials_file, enable_rarity) + # print(f"Materials DNA: {single_dna}") # print("============\n") - return singleDNA + return single_dna - def create_DNAList(): - """Creates DNAList. Loops through createDNARandom() and applies Rarity, and Logic while checking if all DNA are unique""" - DNASetReturn = set() + def create_dna_list(): + """ + Creates dna_list. 
Loops through createDNARandom() and applies Rarity, and Logic while checking if all DNA + are unique. + """ + dna_set_return = set() - for i in range(collectionSize): - dnaPushToList = partial(singleCompleteDNA) + for i in range(collection_size): + dna_push_to_list = partial(singleCompleteDNA) - DNASetReturn |= {''.join([dnaPushToList()]) for _ in range(collectionSize - len(DNASetReturn))} + dna_set_return |= {''.join([dna_push_to_list()]) for _ in range(collection_size - len(dna_set_return))} - DNAListUnformatted = list(DNASetReturn) + dna_list_non_formatted = list(dna_set_return) - DNAListFormatted = [] - DNA_Counter = 1 - for i in DNAListUnformatted: - DNAListFormatted.append({ + dna_list_formatted = [] + dna_counter = 1 + for i in dna_list_non_formatted: + dna_list_formatted.append({ i: { - "Complete": False, - "Order_Num": DNA_Counter + "complete": False, + "order_num": dna_counter } }) - DNA_Counter += 1 + dna_counter += 1 - return DNAListFormatted + return dna_list_formatted - DNAList = create_DNAList() + dna_list = create_dna_list() # Messages: - Helpers.raise_warning_collection_size(DNAList, collectionSize) + Helpers.raise_warning_collection_size(dna_list, collection_size) # Data stored in batchDataDictionary: - DataDictionary["numNFTsGenerated"] = len(DNAList) - DataDictionary["hierarchy"] = hierarchy - DataDictionary["DNAList"] = DNAList + data_dictionary["num_nfts_generated"] = len(dna_list) + data_dictionary["hierarchy"] = hierarchy + data_dictionary["dna_list"] = dna_list - return DataDictionary + return data_dictionary -def makeBatches(collectionSize, nftsPerBatch, save_path, batch_json_save_path): +def make_batches( + collection_size, + nfts_per_batch, + save_path, + batch_json_save_path +): """ Sorts through all the batches and outputs a given number of batches depending on collectionSize and nftsPerBatch. 
These files are then saved as Batch#.json files to batch_json_save_path """ # Clears the Batch Data folder of Batches: - batchList = os.listdir(batch_json_save_path) - if batchList: - for i in batchList: + batch_list = os.listdir(batch_json_save_path) + if batch_list: + for i in batch_list: batch = os.path.join(batch_json_save_path, i) if os.path.exists(batch): os.remove( os.path.join(batch_json_save_path, i) ) - Blend_My_NFTs_Output = os.path.join(save_path, "Blend_My_NFTs Output", "NFT_Data") - NFTRecord_save_path = os.path.join(Blend_My_NFTs_Output, "NFTRecord.json") - DataDictionary = json.load(open(NFTRecord_save_path)) + blend_my_nf_ts_output = os.path.join(save_path, "Blend_My_NFTs Output", "NFT_Data") + nft_record_save_path = os.path.join(blend_my_nf_ts_output, "NFTRecord.json") + data_dictionary = json.load(open(nft_record_save_path)) - numNFTsGenerated = DataDictionary["numNFTsGenerated"] - hierarchy = DataDictionary["hierarchy"] - DNAList = DataDictionary["DNAList"] + num_nfts_generated = data_dictionary["num_nfts_generated"] + hierarchy = data_dictionary["hierarchy"] + dna_list = data_dictionary["dna_list"] - numBatches = collectionSize // nftsPerBatch - remainder_dna = collectionSize % nftsPerBatch + num_batches = collection_size // nfts_per_batch + remainder_dna = collection_size % nfts_per_batch if remainder_dna > 0: - numBatches += 1 + num_batches += 1 - print(f"To generate batches of {nftsPerBatch} DNA sequences per batch, with a total of {numNFTsGenerated}" - f" possible NFT DNA sequences, the number of batches generated will be {numBatches}") + print(f"To generate batches of {nfts_per_batch} DNA sequences per batch, with a total of {num_nfts_generated}" + f" possible NFT DNA sequences, the number of batches generated will be {num_batches}") batches_dna_list = [] - for i in range(numBatches): - BatchDNAList = [] - if i != range(numBatches)[-1]: - BatchDNAList = list(DNAList[0:nftsPerBatch]) - batches_dna_list.append(BatchDNAList) + for i in 
range(num_batches): + if i != range(num_batches)[-1]: + batch_dna_list = list(dna_list[0:nfts_per_batch]) + batches_dna_list.append(batch_dna_list) - DNAList = [x for x in DNAList if x not in BatchDNAList] + dna_list = [x for x in dna_list if x not in batch_dna_list] else: - BatchDNAList = DNAList + batch_dna_list = dna_list - batchDictionary = { - "NFTs_in_Batch": int(len(BatchDNAList)), + batch_dictionary = { + "nfts_in_batch": int(len(batch_dna_list)), "hierarchy": hierarchy, - "BatchDNAList": BatchDNAList + "batch_dna_list": batch_dna_list } - batchDictionary = json.dumps(batchDictionary, indent=1, ensure_ascii=True) + batch_dictionary = json.dumps(batch_dictionary, indent=1, ensure_ascii=True) with open(os.path.join(batch_json_save_path, f"Batch{i + 1}.json"), "w") as outfile: - outfile.write(batchDictionary) + outfile.write(batch_dictionary) -def send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, enableLogic, logicFile, enableMaterials, - materialsFile, Blend_My_NFTs_Output, batch_json_save_path, enable_debug): +def send_to_record( + collection_size, + nfts_per_batch, + save_path, + enable_rarity, + enable_logic, + logic_file, + enable_materials, + materials_file, + blend_my_nfts_output, + batch_json_save_path, + enable_debug +): """ - Creates NFTRecord.json file and sends "batchDataDictionary" to it. NFTRecord.json is a permanent record of all DNA + Creates NFTRecord.json file and sends "batch_data_dictionary" to it. NFTRecord.json is a permanent record of all DNA you've generated with all attribute variants. If you add new variants or attributes to your .blend file, other scripts need to reference this .json file to generate new DNA and make note of the new attributes and variants to prevent - repeate DNA. + repeat DNA. 
""" # Checking Scene is compatible with BMNFTs: @@ -221,66 +248,73 @@ def send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, e # Messages: print( - f"\n{Helpers.TextColors.OK}======== Creating NFT Data ========{Helpers.TextColors.RESET}" - f"\nGenerating {collectionSize} NFT DNA" + f"\n{TextColors.OK}======== Creating NFT Data ========{TextColors.RESET}" + f"\nGenerating {collection_size} NFT DNA" ) - if not enableRarity and not enableLogic: + if not enable_rarity and not enable_logic: print( - f"{Helpers.TextColors.OK}NFT DNA will be determined randomly, no special properties or parameters are " - f"applied.\n{Helpers.TextColors.RESET}") + f"{TextColors.OK}NFT DNA will be determined randomly, no special properties or parameters are " + f"applied.\n{TextColors.RESET}") - if enableRarity: + if enable_rarity: print( - f"{Helpers.TextColors.OK}Rarity is ON. Weights listed in .blend scene will be taken into account." - f"{Helpers.TextColors.RESET}" + f"{TextColors.OK}Rarity is ON. Weights listed in .blend scene will be taken into account." + f"{TextColors.RESET}" ) - if enableLogic: + if enable_logic: print( - f"{Helpers.TextColors.OK}Logic is ON. {len(list(logicFile.keys()))} rules detected and applied." - f"{Helpers.TextColors.RESET}" + f"{TextColors.OK}Logic is ON. {len(list(logic_file.keys()))} rules detected and applied." 
+ f"{TextColors.RESET}" ) time_start = time.time() def create_nft_data(): try: - DataDictionary = generateNFT_DNA(collectionSize, enableRarity, enableLogic, logicFile, enableMaterials, - materialsFile, enable_debug) - NFTRecord_save_path = os.path.join(Blend_My_NFTs_Output, "NFTRecord.json") + data_dictionary = generate_nft_dna( + collection_size, + enable_rarity, + enable_logic, + logic_file, + enable_materials, + materials_file, + enable_debug, + ) + nft_record_save_path = os.path.join(blend_my_nfts_output, "NFTRecord.json") # Checks: - - Helpers.raise_warning_max_nfts(nftsPerBatch, collectionSize) - Helpers.check_duplicates(DataDictionary["DNAList"]) + Helpers.raise_warning_max_nfts(nfts_per_batch, collection_size) + Helpers.check_duplicates(data_dictionary["dna_list"]) Helpers.raise_error_zero_combinations() - if enableRarity: - Helpers.check_rarity(DataDictionary["hierarchy"], DataDictionary["DNAList"], + if enable_rarity: + Helpers.check_rarity(data_dictionary["hierarchy"], data_dictionary["dna_list"], os.path.join(save_path, "Blend_My_NFTs Output/NFT_Data")) except FileNotFoundError: raise FileNotFoundError( - f"\n{Helpers.TextColors.ERROR}Blend_My_NFTs Error:\n" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows " - f"the naming conventions and scene structure. For more information, " - f"see:\n{Helpers.TextColors.RESET}" + f"the naming conventions and scene structure. 
For more information, see:\n{TextColors.RESET}" f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) finally: loading.stop() try: - ledger = json.dumps(DataDictionary, indent=1, ensure_ascii=True) - with open(NFTRecord_save_path, 'w') as outfile: + ledger = json.dumps(data_dictionary, indent=1, ensure_ascii=True) + with open(nft_record_save_path, 'w') as outfile: outfile.write(ledger + '\n') print( - f"\n{Helpers.TextColors.OK}Blend_My_NFTs Success:\n" - f"{len(DataDictionary['DNAList'])} NFT DNA saved to {NFTRecord_save_path}. NFT DNA Successfully created.\n{Helpers.TextColors.RESET}") + f"\n{TextColors.OK}Blend_My_NFTs Success:\n" + f"{len(data_dictionary['DNAList'])} NFT DNA saved to {nft_record_save_path}. NFT DNA Successfully " + f"created.\n{TextColors.RESET}") - except: + except Exception: + traceback.print_exc() raise ( f"\n{Helpers.TextColors.ERROR}Blend_My_NFTs Error:\n" f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows " @@ -292,7 +326,7 @@ def send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, e # Loading Animation: loading = Helpers.Loader(f'Creating NFT DNA...', '').start() create_nft_data() - makeBatches(collectionSize, nftsPerBatch, save_path, batch_json_save_path) + make_batches(collection_size, nfts_per_batch, save_path, batch_json_save_path) loading.stop() time_end = time.time() diff --git a/main/Intermediate.py b/main/Intermediate.py index 6068a47..1dd6f43 100644 --- a/main/Intermediate.py +++ b/main/Intermediate.py @@ -44,7 +44,7 @@ def send_To_Record_JSON(input, reverse_order=False): num += 1 - DNA_Generator.send_To_Record_JSON(input.collectionSize, + DNA_Generator.send_to_record(input.collectionSize, input.nftsPerBatch, input.save_path, input.enableRarity, From 7c389e15dfbea0d6c378eb361fc9b4c6a11468f6 Mon Sep 17 00:00:00 2001 From: Torrin Leonard <82110564+torrinworx@users.noreply.github.com> Date: Wed, 24 Aug 2022 11:49:09 -0400 Subject: 
[PATCH 07/12] Renaming files --- ...a_UIList.py => custom_metadata_ui_list.py} | 0 UILists/{Logic_UIList.py => logic_ui_list.py} | 0 __init__.py | 213 ++++-- main/Exporter.py | 586 ---------------- main/{DNA_Generator.py => dna_generator.py} | 30 +- main/exporter.py | 641 ++++++++++++++++++ main/{HeadlessUtil.py => headless_util.py} | 6 +- main/{Helpers.py => helpers.py} | 0 main/{Intermediate.py => intermediate.py} | 6 +- main/{Logic.py => logic.py} | 4 +- ...ial_Generator.py => material_generator.py} | 2 +- main/{Metadata.py => metadata_templates.py} | 2 +- main/{Refactorer.py => refactorer.py} | 2 +- 13 files changed, 807 insertions(+), 685 deletions(-) rename UILists/{Custom_Metadata_UIList.py => custom_metadata_ui_list.py} (100%) rename UILists/{Logic_UIList.py => logic_ui_list.py} (100%) delete mode 100644 main/Exporter.py rename main/{DNA_Generator.py => dna_generator.py} (92%) create mode 100644 main/exporter.py rename main/{HeadlessUtil.py => headless_util.py} (91%) rename main/{Helpers.py => helpers.py} (100%) rename main/{Intermediate.py => intermediate.py} (95%) rename main/{Logic.py => logic.py} (98%) rename main/{Material_Generator.py => material_generator.py} (98%) rename main/{Metadata.py => metadata_templates.py} (98%) rename main/{Refactorer.py => refactorer.py} (96%) diff --git a/UILists/Custom_Metadata_UIList.py b/UILists/custom_metadata_ui_list.py similarity index 100% rename from UILists/Custom_Metadata_UIList.py rename to UILists/custom_metadata_ui_list.py diff --git a/UILists/Logic_UIList.py b/UILists/logic_ui_list.py similarity index 100% rename from UILists/Logic_UIList.py rename to UILists/logic_ui_list.py diff --git a/__init__.py b/__init__.py index 0e33683..7de5fdb 100644 --- a/__init__.py +++ b/__init__.py @@ -36,33 +36,33 @@ sys.path.append(os.path.dirname(os.path.realpath(__file__))) # Local file imports: from main import \ - Helpers, \ - DNA_Generator, \ - Exporter, \ - HeadlessUtil, \ - Intermediate, \ - Logic, \ - 
Material_Generator, \ - Metadata, \ - Refactorer + helpers, \ + dna_generator, \ + exporter, \ + headless_util, \ + intermediate, \ + logic, \ + material_generator, \ + metadata_templates, \ + refactorer from UILists import \ - Custom_Metadata_UIList, \ - Logic_UIList + custom_metadata_ui_list, \ + logic_ui_list if "bpy" in locals(): modules = { - "Helpers": Helpers, - "DNA_Generator": DNA_Generator, - "Exporter": Exporter, - "HeadlessUtil": HeadlessUtil, - "Intermediate": Intermediate, - "Logic": Logic, - "Material_Generator": Material_Generator, - "Metadata": Metadata, - "Refactorer": Refactorer, - "Custom_Metadata_UIList": Custom_Metadata_UIList, - "Logic_UIList": Logic_UIList, + "helpers": helpers, + "dna_generator": dna_generator, + "exporter": exporter, + "headless_util": headless_util, + "intermediate": intermediate, + "logic": logic, + "material_generator": material_generator, + "metadata_templates": metadata_templates, + "refactorer": refactorer, + "custom_metadata_ui_list": custom_metadata_ui_list, + "logic_ui_list": logic_ui_list, } for i in modules: @@ -86,7 +86,7 @@ def Refresh_UI(dummy1, dummy2): global combinations global recommended_limit - combinations = (Helpers.get_combinations()) + combinations = (helpers.get_combinations()) recommended_limit = int(round(combinations / 2)) # Add panel classes that require refresh to this refresh_panels tuple: @@ -277,7 +277,7 @@ def runAsHeadless(): f"animationEnum={settings.animationEnum}\n" f"modelBool={str(settings.modelBool)}\n" f"modelEnum={settings.modelEnum}\n" - f"batchToGenerate={str(settings.batchToGenerate)}\n" + f"batch_to_generate={str(settings.batchToGenerate)}\n" f"cardanoMetaDataBool={str(settings.cardanoMetaDataBool)}\n" f"cardano_description={settings.cardano_description}\n" f"erc721MetaData={str(settings.erc721MetaData)}\n" @@ -291,7 +291,7 @@ def runAsHeadless(): ) print(output) - args, parser = HeadlessUtil.getPythonArgs() + args, parser = headless_util.getPythonArgs() settings = 
bpy.context.scene.input_tool @@ -342,13 +342,13 @@ def runAsHeadless(): input.batch_json_save_path = args.batch_data_path if args.operation == 'create-dna': - Intermediate.send_To_Record_JSON(input) + intermediate.send_To_Record_JSON(input) elif args.operation == 'generate-nfts': - Intermediate.render_and_save_NFTs(input) + intermediate.render_and_save_NFTs(input) elif args.operation == 'refactor-batches': - Refactorer.reformatNFTCollection(input) + refactorer.reformatNFTCollection(input) # ======== User input Property Group ======== # @@ -357,8 +357,16 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): nftName: bpy.props.StringProperty(name="NFT Name") - collectionSize: bpy.props.IntProperty(name="NFT Collection Size", default=1, min=1) # max=(combinations - offset) - nftsPerBatch: bpy.props.IntProperty(name="NFTs Per Batch", default=1, min=1) # max=(combinations - offset) + collectionSize: bpy.props.IntProperty( + name="NFT Collection Size", + default=1, + min=1 + ) # max=(combinations - offset) + nftsPerBatch: bpy.props.IntProperty( + name="NFTs Per Batch", + default=1, + min=1 + ) # max=(combinations - offset) save_path: bpy.props.StringProperty( name="Save Path", @@ -368,10 +376,16 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): subtype="DIR_PATH" ) - enableRarity: bpy.props.BoolProperty(name="Enable Rarity") + enableRarity: bpy.props.BoolProperty( + name="Enable Rarity" + ) - enableLogic: bpy.props.BoolProperty(name="Enable Logic") - enable_Logic_Json: bpy.props.BoolProperty(name="Use Logic.json instead") + enableLogic: bpy.props.BoolProperty( + name="Enable Logic" + ) + enable_Logic_Json: bpy.props.BoolProperty( + name="Use Logic.json instead" + ) logicFile: bpy.props.StringProperty( name="Logic File Path", description="Path where Logic.json is located.", @@ -380,7 +394,9 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): subtype="FILE_PATH" ) - enableMaterials: bpy.props.BoolProperty(name="Enable Materials") + 
enableMaterials: bpy.props.BoolProperty( + name="Enable Materials" + ) materialsFile: bpy.props.StringProperty( name="Materials File", description="Path where Materials.json is located.", @@ -390,7 +406,9 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): ) # Generate NFTs Panel: - imageBool: bpy.props.BoolProperty(name="Image") + imageBool: bpy.props.BoolProperty( + name="Image" + ) imageEnum: bpy.props.EnumProperty( name="Image File Format", description="Select Image file format", @@ -400,7 +418,9 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): ] ) - animationBool: bpy.props.BoolProperty(name="Animation") + animationBool: bpy.props.BoolProperty( + name="Animation" + ) animationEnum: bpy.props.EnumProperty( name="Animation File Format", description="Select Animation file format", @@ -414,7 +434,9 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): ] ) - modelBool: bpy.props.BoolProperty(name="3D Model") + modelBool: bpy.props.BoolProperty( + name="3D Model" + ) modelEnum: bpy.props.EnumProperty( name="3D Model File Format", description="Select 3D Model file format", @@ -432,20 +454,37 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): ] ) - batchToGenerate: bpy.props.IntProperty(name="Batch To Generate", default=1, - min=1) + batchToGenerate: bpy.props.IntProperty( + name="Batch To Generate", + default=1, + min=1 + ) # Refactor Batches & Create Metadata Panel: - cardanoMetaDataBool: bpy.props.BoolProperty(name="Cardano Cip") - cardano_description: bpy.props.StringProperty(name="Cardano description") + cardanoMetaDataBool: bpy.props.BoolProperty( + name="Cardano Cip" + ) + cardano_description: bpy.props.StringProperty( + name="Cardano description" + ) - solanaMetaDataBool: bpy.props.BoolProperty(name="Solana Metaplex") - solana_description: bpy.props.StringProperty(name="Solana description") + solanaMetaDataBool: bpy.props.BoolProperty( + name="Solana Metaplex" + ) + solana_description: bpy.props.StringProperty( + 
name="Solana description" + ) - erc721MetaData: bpy.props.BoolProperty(name="ERC721") - erc721_description: bpy.props.StringProperty(name="ERC721 description") + erc721MetaData: bpy.props.BoolProperty( + name="ERC721" + ) + erc721_description: bpy.props.StringProperty( + name="ERC721 description" + ) - enableCustomFields: bpy.props.BoolProperty(name="Enable Custom Metadata Fields") + enableCustomFields: bpy.props.BoolProperty( + name="Enable Custom Metadata Fields" + ) customfieldsFile: bpy.props.StringProperty( name="Custom Fields File", description="Path where Custom_Fields.json is located.", @@ -457,27 +496,55 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): # TODO: Add 'Other' panel inputs to Headless functionality. # Other Panel: - enableAutoSave: bpy.props.BoolProperty(name="Auto Save Before Generation", - description="Automatically saves your Blender file when 'Generate NFTs & Create Metadata' button is clicked") + enableAutoSave: bpy.props.BoolProperty( + name="Auto Save Before Generation", + description="Automatically saves your Blender file when 'Generate NFTs & Create Metadata' button is clicked" + ) - enableAutoShutdown: bpy.props.BoolProperty(name="Auto Shutdown", - description="Automatically shuts down your computer after a Batch is finished Generating") + enableAutoShutdown: bpy.props.BoolProperty( + name="Auto Shutdown", + description="Automatically shuts down your computer after a Batch is finished Generating" + ) - specify_timeBool: bpy.props.BoolProperty(name="Shutdown in a Given Amount of Time", - description="Wait a given amount of time after a Batch is generated before Automatic Shutdown") - hours: bpy.props.IntProperty(default=0, min=0) - minutes: bpy.props.IntProperty(default=0, min=0) + specify_timeBool: bpy.props.BoolProperty( + name="Shutdown in a Given Amount of Time", + description="Wait a given amount of time after a Batch is generated before Automatic Shutdown" + ) + hours: bpy.props.IntProperty( + default=0, min=0 + ) + 
minutes: bpy.props.IntProperty( + default=0, min=0 + ) - emailNotificationBool: bpy.props.BoolProperty(name="Email Notifications", - description="Receive Email Notifications from Blender once a batch is finished generating") - sender_from: bpy.props.StringProperty(name="From", default="from@example.com") - email_password: bpy.props.StringProperty(name="Password", subtype='PASSWORD') - receiver_to: bpy.props.StringProperty(name="To", default="to@example.com") + emailNotificationBool: bpy.props.BoolProperty( + name="Email Notifications", + description="Receive Email Notifications from Blender once a batch is finished generating" + ) + sender_from: bpy.props.StringProperty( + name="From", + default="from@example.com" + ) + email_password: bpy.props.StringProperty( + name="Password", + subtype='PASSWORD' + ) + receiver_to: bpy.props.StringProperty( + name="To", + default="to@example.com" + ) - enable_debug: bpy.props.BoolProperty(name="Enable Debug Mode", description="Allows you to run Blend_My_NFTs without generating any content files and includes more console information.") + enable_debug: bpy.props.BoolProperty( + name="Enable Debug Mode", + description="Allows you to run Blend_My_NFTs without generating any content files and includes more " + "console information." + ) # API Panel properties: - apiKey: bpy.props.StringProperty(name="API Key", subtype='PASSWORD') # Test code for future features + apiKey: bpy.props.StringProperty( + name="API Key", + subtype='PASSWORD' + ) # Test code for future features # ======== Main Operators ======== # @@ -500,7 +567,7 @@ class createData(bpy.types.Operator): self.report({'ERROR'}, f"No Logic.json file path set. 
Please set the file path to your Logic.json file.") - Intermediate.send_To_Record_JSON(input) + intermediate.send_To_Record_JSON(input) self.report({'INFO'}, f"NFT Data created!") return {"FINISHED"} @@ -523,7 +590,7 @@ class exportNFTs(bpy.types.Operator): input = getBMNFTData() # Handling Custom Fields UIList input: - Intermediate.render_and_save_NFTs(input) + intermediate.render_and_save_NFTs(input) self.report({'INFO'}, f"All NFTs generated for batch {input.batchToGenerate}!") @@ -545,7 +612,7 @@ class resume_failed_batch(bpy.types.Operator): file_name = os.path.join(_batch_json_save_path, "Batch{}.json".format(_batchToGenerate)) batchData = json.load(open(file_name)) - _fail_state, _failed_batch, _failed_dna, _failed_dna_index = Helpers.check_failed_batches(_batch_json_save_path) + _fail_state, _failed_batch, _failed_dna, _failed_dna_index = helpers.check_failed_batches(_batch_json_save_path) render_settings = batchData["Generation Save"][-1]["Render_Settings"] @@ -553,7 +620,7 @@ class resume_failed_batch(bpy.types.Operator): nftName=render_settings["nftName"], save_path=_save_path, nftsPerBatch=render_settings["nftsPerBatch"], - batchToGenerate=render_settings["batchToGenerate"], + batchToGenerate=render_settings["batch_to_generate"], collectionSize=render_settings["collectionSize"], Blend_My_NFTs_Output=_Blend_My_NFTs_Output, @@ -608,7 +675,7 @@ class resume_failed_batch(bpy.types.Operator): custom_Fields=render_settings["custom_Fields"], ) - Exporter.render_and_save_NFTs(input) + exporter.render_and_save_nfts(input) self.report({'INFO'}, f"Resuming Failed Batch Generation!") @@ -628,7 +695,7 @@ class refactor_Batches(bpy.types.Operator): def execute(self, context): # Passing info to main functions for refactoring: - Refactorer.reformatNFTCollection(getBMNFTData()) + refactorer.reformatNFTCollection(getBMNFTData()) return {"FINISHED"} def invoke(self, context, event): @@ -682,7 +749,7 @@ class export_settings(bpy.types.Operator): 
f"modelEnum={settings.modelEnum}\n" "\n" "#Batch to generate\n" - f"batchToGenerate={str(settings.batchToGenerate)}\n" + f"batch_to_generate={str(settings.batchToGenerate)}\n" "\n" "#Metadata Format\n" f"cardanoMetaDataBool={str(settings.cardanoMetaDataBool)}\n" @@ -878,14 +945,14 @@ class BMNFTS_PT_GenerateNFTs(bpy.types.Panel): row.operator("custom_metadata_fields_uilist.clear_list", icon="X") row = layout.row() - row.prop(input_tool_scene, "batchToGenerate") + row.prop(input_tool_scene, "batch_to_generate") save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path) Blend_My_NFTs_Output = os.path.join(save_path, "Blend_My_NFTs Output", "NFT_Data") batch_json_save_path = os.path.join(Blend_My_NFTs_Output, "Batch_Data") nftBatch_save_path = os.path.join(save_path, "Blend_My_NFTs Output", "Generated NFT Batches") - fail_state, failed_batch, failed_dna, failed_dna_index = Helpers.check_failed_batches(batch_json_save_path) + fail_state, failed_batch, failed_dna, failed_dna_index = helpers.check_failed_batches(batch_json_save_path) if fail_state: row = layout.row() @@ -1037,7 +1104,7 @@ classes = ( BMNFTS_PT_GenerateNFTs, BMNFTS_PT_Refactor, BMNFTS_PT_Other, - ) + Custom_Metadata_UIList.classes_Custom_Metadata_UIList + Logic_UIList.classes_Logic_UIList + ) + custom_metadata_ui_list.classes_Custom_Metadata_UIList + logic_ui_list.classes_Logic_UIList def register(): @@ -1047,10 +1114,10 @@ def register(): bpy.types.Scene.input_tool = bpy.props.PointerProperty(type=BMNFTS_PGT_Input_Properties) bpy.types.Scene.custom_metadata_fields = CollectionProperty( - type=Custom_Metadata_UIList.CUSTOM_custom_metadata_fields_objectCollection) + type=custom_metadata_ui_list.CUSTOM_custom_metadata_fields_objectCollection) bpy.types.Scene.custom_metadata_fields_index = IntProperty() - bpy.types.Scene.logic_fields = CollectionProperty(type=Logic_UIList.CUSTOM_logic_objectCollection) + bpy.types.Scene.logic_fields = 
CollectionProperty(type=logic_ui_list.CUSTOM_logic_objectCollection) bpy.types.Scene.logic_fields_index = IntProperty() diff --git a/main/Exporter.py b/main/Exporter.py deleted file mode 100644 index 283d70c..0000000 --- a/main/Exporter.py +++ /dev/null @@ -1,586 +0,0 @@ -# Purpose: -# This file takes a given Batch created by DNA_Generator.py and tells blender to render the image or export a 3D model -# to the NFT_Output folder. - -import bpy -import os -import ssl -import time -import json -import smtplib -import datetime -import platform - -from .Helpers import TextColors, Loader -from .Metadata import createCardanoMetadata, createSolanaMetaData, createErc721MetaData - - -# Save info -def save_batch(batch, file_name): - saved_batch = json.dumps(batch, indent=1, ensure_ascii=True) - - with open(os.path.join(file_name), 'w') as outfile: - outfile.write(saved_batch + '\n') - - -def save_generation_state(input): - """Saves date and time of generation start, and generation types; Images, Animations, 3D Models, and the file types for each.""" - file_name = os.path.join(input.batch_json_save_path, "Batch{}.json".format(input.batchToGenerate)) - batch = json.load(open(file_name)) - - CURRENT_TIME = datetime.datetime.now().strftime("%H:%M:%S") - CURRENT_DATE = datetime.datetime.now().strftime("%d/%m/%Y") - LOCAL_TIMEZONE = str(datetime.datetime.now(datetime.timezone.utc)) - - if "Generation Save" in batch: - batch_save_number = int(batch[f"Generation Save"].index(batch[f"Generation Save"][-1])) - else: - batch_save_number = 0 - - batch["Generation Save"] = list() - batch["Generation Save"].append({ - "Batch Save Number": batch_save_number + 1, - "DNA Generated": None, - "Generation Start Date and Time": [CURRENT_TIME, CURRENT_DATE, LOCAL_TIMEZONE], - "Render_Settings": { - "nftName": input.nftName, - "save_path": input.save_path, - "nftsPerBatch": input.nftsPerBatch, - "batchToGenerate": input.batchToGenerate, - "collectionSize": input.collectionSize, - - 
"Blend_My_NFTs_Output": input.Blend_My_NFTs_Output, - "batch_json_save_path": input.batch_json_save_path, - "nftBatch_save_path": input.nftBatch_save_path, - - "enableImages": input.enableImages, - "imageFileFormat": input.imageFileFormat, - - "enableAnimations": input.enableAnimations, - "animationFileFormat": input.animationFileFormat, - - "enableModelsBlender": input.enableModelsBlender, - "modelFileFormat": input.modelFileFormat, - - "enableCustomFields": input.enableCustomFields, - - "cardanoMetaDataBool": input.cardanoMetaDataBool, - "solanaMetaDataBool": input.solanaMetaDataBool, - "erc721MetaData": input.erc721MetaData, - - "cardano_description": input.cardano_description, - "solana_description": input.solana_description, - "erc721_description": input.erc721_description, - - "enableMaterials": input.enableMaterials, - "materialsFile": input.materialsFile, - - "enableLogic": input.enableLogic, - "enable_Logic_Json": input.enable_Logic_Json, - "logicFile": input.logicFile, - - "enableRarity": input.enableRarity, - - "enableAutoShutdown": input.enableAutoShutdown, - - "specify_timeBool": input.specify_timeBool, - "hours": input.hours, - "minutes": input.minutes, - - "emailNotificationBool": input.emailNotificationBool, - "sender_from": input.sender_from, - "email_password": input.email_password, - "receiver_to": input.receiver_to, - - "custom_Fields": input.custom_Fields, - }, - }) - - save_batch(batch, file_name) - - -def save_completed(full_single_dna, a, x, batch_json_save_path, batchToGenerate): - """Saves progress of rendering to batch.json file.""" - - file_name = os.path.join(batch_json_save_path, "Batch{}.json".format(batchToGenerate)) - batch = json.load(open(file_name)) - index = batch["BatchDNAList"].index(a) - batch["BatchDNAList"][index][full_single_dna]["Complete"] = True - batch["Generation Save"][-1]["DNA Generated"] = x - - save_batch(batch, file_name) - - -# Exporter functions: -def getBatchData(batchToGenerate, batch_json_save_path): - """ - 
Retrieves a given batches data determined by renderBatch in config.py - """ - - file_name = os.path.join(batch_json_save_path, "Batch{}.json".format(batchToGenerate)) - batch = json.load(open(file_name)) - - NFTs_in_Batch = batch["NFTs_in_Batch"] - hierarchy = batch["hierarchy"] - BatchDNAList = batch["BatchDNAList"] - - return NFTs_in_Batch, hierarchy, BatchDNAList - - -def render_and_save_NFTs(input): - """ - Renders the NFT DNA in a Batch#.json, where # is renderBatch in config.py. Turns off the viewport camera and - the render camera for all items in hierarchy. - """ - - time_start_1 = time.time() - - # If failed Batch is detected and user is resuming its generation: - if input.fail_state: - print(f"{TextColors.ERROR}\nResuming Batch #{input.failed_batch}\n{TextColors.RESET}") - NFTs_in_Batch, hierarchy, BatchDNAList = getBatchData(input.failed_batch, input.batch_json_save_path) - for a in range(input.failed_dna): - del BatchDNAList[0] - x = input.failed_dna + 1 - - # If user is generating the normal way: - else: - print(f"\nGenerating Batch #{input.batchToGenerate}\n") - NFTs_in_Batch, hierarchy, BatchDNAList = getBatchData(input.batchToGenerate, input.batch_json_save_path) - save_generation_state(input) - x = 1 - - if input.enableMaterials: - materialsFile = json.load(open(input.materialsFile)) - - for a in BatchDNAList: - full_single_dna = list(a.keys())[0] - Order_Num = a[full_single_dna]['Order_Num'] - - # Material handling: - if input.enableMaterials: - single_dna, material_dna = full_single_dna.split(':') - - if not input.enableMaterials: - single_dna = full_single_dna - - def match_DNA_to_Variant(single_dna): - """ - Matches each DNA number separated by "-" to its attribute, then its variant. 
- """ - - listAttributes = list(hierarchy.keys()) - listDnaDecunstructed = single_dna.split('-') - dnaDictionary = {} - - for i, j in zip(listAttributes, listDnaDecunstructed): - dnaDictionary[i] = j - - for x in dnaDictionary: - for k in hierarchy[x]: - kNum = hierarchy[x][k]["number"] - if kNum == dnaDictionary[x]: - dnaDictionary.update({x: k}) - return dnaDictionary - - def match_materialDNA_to_Material(single_dna, material_dna, materialsFile): - """ - Matches the Material DNA to it's selected Materials unless a 0 is present meaning no material for that variant was selected. - """ - listAttributes = list(hierarchy.keys()) - listDnaDecunstructed = single_dna.split('-') - listMaterialDNADeconstructed = material_dna.split('-') - - full_dna_dict = {} - - for attribute, variant, material in zip(listAttributes, listDnaDecunstructed, listMaterialDNADeconstructed): - - for var in hierarchy[attribute]: - if hierarchy[attribute][var]['number'] == variant: - variant = var - - if material != '0': # If material is not empty - for variant_m in materialsFile: - if variant == variant_m: - # Getting Materials name from Materials index in the Materials List - materials_list = list(materialsFile[variant_m]["Material List"].keys()) - - material = materials_list[int(material) - 1] # Subtract 1 because '0' means empty mat - break - - full_dna_dict[variant] = material - - return full_dna_dict - - metadataMaterialDict = {} - - if input.enableMaterials: - materialdnaDictionary = match_materialDNA_to_Material(single_dna, material_dna, materialsFile) - - for var_mat in list(materialdnaDictionary.keys()): - if materialdnaDictionary[var_mat] != '0': - if not materialsFile[var_mat]['Variant Objects']: - """ - If objects to apply material to not specified, apply to all objects in Variant collection. 
- """ - metadataMaterialDict[var_mat] = materialdnaDictionary[var_mat] - - for obj in bpy.data.collections[var_mat].all_objects: - selected_object = bpy.data.objects.get(obj.name) - selected_object.active_material = bpy.data.materials[materialdnaDictionary[var_mat]] - - if materialsFile[var_mat]['Variant Objects']: - """ - If objects to apply material to are specified, apply material only to objects specified withing the Variant collection. - """ - metadataMaterialDict[var_mat] = materialdnaDictionary[var_mat] - - for obj in materialsFile[var_mat]['Variant Objects']: - selected_object = bpy.data.objects.get(obj) - selected_object.active_material = bpy.data.materials[materialdnaDictionary[var_mat]] - - # Turn off render camera and viewport camera for all collections in hierarchy - for i in hierarchy: - for j in hierarchy[i]: - try: - bpy.data.collections[j].hide_render = True - bpy.data.collections[j].hide_viewport = True - except KeyError: - raise TypeError( - f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" - f"The Collection '{j}' appears to be missing or has been renamed. If you made any changes to " - f"your .blned file scene, ensure you re-create your NFT Data so Blend_My_NFTs can read your scene." 
- f"For more information see:{TextColors.RESET}" - f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" - ) - - dnaDictionary = match_DNA_to_Variant(single_dna) - name = input.nftName + "_" + str(Order_Num) - - # Change Text Object in Scene to match DNA string: - # Variables that can be used: full_single_dna, name, Order_Num - # ob = bpy.data.objects['Text'] # Object name - # ob.data.body = str(f"DNA: {full_single_dna}") # Set text of Text Object ob - - print(f"\n{TextColors.OK}======== Generating NFT {x}/{NFTs_in_Batch}: {name} ========{TextColors.RESET}") - print(f"\nVariants selected:") - print(f"{dnaDictionary}") - if input.enableMaterials: - print(f"\nMaterials selected:") - print(f"{materialdnaDictionary}") - - print(f"\nDNA Code:{full_single_dna}") - - for c in dnaDictionary: - collection = dnaDictionary[c] - if collection != '0': - bpy.data.collections[collection].hide_render = False - bpy.data.collections[collection].hide_viewport = False - - time_start_2 = time.time() - - # Main paths for batch sub-folders: - batchFolder = os.path.join(input.nftBatch_save_path, "Batch" + str(input.batchToGenerate)) - - imageFolder = os.path.join(batchFolder, "Images") - animationFolder = os.path.join(batchFolder, "Animations") - modelFolder = os.path.join(batchFolder, "Models") - BMNFT_metaData_Folder = os.path.join(batchFolder, "BMNFT_metadata") - - imagePath = os.path.join(imageFolder, name) - animationPath = os.path.join(animationFolder, name) - modelPath = os.path.join(modelFolder, name) - - cardanoMetadataPath = os.path.join(batchFolder, "Cardano_metadata") - solanaMetadataPath = os.path.join(batchFolder, "Solana_metadata") - erc721MetadataPath = os.path.join(batchFolder, "Erc721_metadata") - - - def check_failed_exists(file_path): - # Delete a file if a fail state is detected and if the file being re-generated already exists. Prevents - # animations from corrupting. 
- - if input.fail_state: - if os.path.exists(file_path): - os.remove(file_path) - - # Generation/Rendering: - if input.enableImages: - - print(f"{TextColors.OK}---Image---{TextColors.RESET}") - - image_render_time_start = time.time() - - check_failed_exists(imagePath) - - def render_image(): - if not os.path.exists(imageFolder): - os.makedirs(imageFolder) - - bpy.context.scene.render.filepath = imagePath - bpy.context.scene.render.image_settings.file_format = input.imageFileFormat - bpy.ops.render.render(write_still=True) - - # Loading Animation: - loading = Loader(f'Rendering Image {x}/{NFTs_in_Batch}...', '').start() - render_image() - loading.stop() - - image_render_time_end = time.time() - - print( - f"{TextColors.OK}Rendered image in {image_render_time_end - image_render_time_start}s.\n{TextColors.RESET}" - ) - - if input.enableAnimations: - print(f"{TextColors.OK}---Animation---{TextColors.RESET}") - - animation_render_time_start = time.time() - - check_failed_exists(animationPath) - - def render_animation(): - if not os.path.exists(animationFolder): - os.makedirs(animationFolder) - - if input.animationFileFormat == "MP4": - bpy.context.scene.render.filepath = animationPath - bpy.context.scene.render.image_settings.file_format = "FFMPEG" - - bpy.context.scene.render.ffmpeg.format = 'MPEG4' - bpy.context.scene.render.ffmpeg.codec = 'H264' - bpy.ops.render.render(animation=True) - - elif input.animationFileFormat == 'PNG': - if not os.path.exists(animationPath): - os.makedirs(animationPath) - - bpy.context.scene.render.filepath = os.path.join(animationPath, name) - bpy.context.scene.render.image_settings.file_format = input.animationFileFormat - bpy.ops.render.render(animation=True) - - elif input.animationFileFormat == 'TIFF': - if not os.path.exists(animationPath): - os.makedirs(animationPath) - - bpy.context.scene.render.filepath = os.path.join(animationPath, name) - bpy.context.scene.render.image_settings.file_format = input.animationFileFormat - 
bpy.ops.render.render(animation=True) - - else: - bpy.context.scene.render.filepath = animationPath - bpy.context.scene.render.image_settings.file_format = input.animationFileFormat - bpy.ops.render.render(animation=True) - - # Loading Animation: - loading = Loader(f'Rendering Animation {x}/{NFTs_in_Batch}...', '').start() - render_animation() - loading.stop() - - animation_render_time_end = time.time() - - print( - f"{TextColors.OK}Rendered animation in {animation_render_time_end - animation_render_time_start}s.\n{TextColors.RESET}" - ) - - if input.enableModelsBlender: - print(f"{TextColors.OK}---3D Model---{TextColors.RESET}") - - model_generation_time_start = time.time() - - def generate_models(): - if not os.path.exists(modelFolder): - os.makedirs(modelFolder) - - for i in dnaDictionary: - coll = dnaDictionary[i] - if coll != '0': - for obj in bpy.data.collections[coll].all_objects: - obj.select_set(True) - - for obj in bpy.data.collections['Script_Ignore'].all_objects: - obj.select_set(True) - - # Remove objects from 3D model export: - # remove_objects: list = [ - # ] - # - # for obj in bpy.data.objects: - # if obj.name in remove_objects: - # obj.select_set(False) - - if input.modelFileFormat == 'GLB': - check_failed_exists(f"{modelPath}.glb") - bpy.ops.export_scene.gltf(filepath=f"{modelPath}.glb", - check_existing=True, - export_format='GLB', - export_keep_originals=True, - use_selection=True) - if input.modelFileFormat == 'GLTF_SEPARATE': - check_failed_exists(f"{modelPath}.gltf") - check_failed_exists(f"{modelPath}.bin") - bpy.ops.export_scene.gltf(filepath=f"{modelPath}", - check_existing=True, - export_format='GLTF_SEPARATE', - export_keep_originals=True, - use_selection=True) - if input.modelFileFormat == 'GLTF_EMBEDDED': - check_failed_exists(f"{modelPath}.gltf") - bpy.ops.export_scene.gltf(filepath=f"{modelPath}.gltf", - check_existing=True, - export_format='GLTF_EMBEDDED', - export_keep_originals=True, - use_selection=True) - elif 
input.modelFileFormat == 'FBX': - check_failed_exists(f"{modelPath}.fbx") - bpy.ops.export_scene.fbx(filepath=f"{modelPath}.fbx", - check_existing=True, - use_selection=True) - elif input.modelFileFormat == 'OBJ': - check_failed_exists(f"{modelPath}.obj") - bpy.ops.export_scene.obj(filepath=f"{modelPath}.obj", - check_existing=True, - use_selection=True, ) - elif input.modelFileFormat == 'X3D': - check_failed_exists(f"{modelPath}.x3d") - bpy.ops.export_scene.x3d(filepath=f"{modelPath}.x3d", - check_existing=True, - use_selection=True) - elif input.modelFileFormat == 'STL': - check_failed_exists(f"{modelPath}.stl") - bpy.ops.export_mesh.stl(filepath=f"{modelPath}.stl", - check_existing=True, - use_selection=True) - elif input.modelFileFormat == 'VOX': - check_failed_exists(f"{modelPath}.vox") - bpy.ops.export_vox.some_data(filepath=f"{modelPath}.vox") - - # Loading Animation: - loading = Loader(f'Generating 3D model {x}/{NFTs_in_Batch}...', '').start() - generate_models() - loading.stop() - - model_generation_time_end = time.time() - - print( - f"{TextColors.OK}Generated 3D model in {model_generation_time_end - model_generation_time_start}s.\n{TextColors.RESET}" - ) - - # Generating Metadata: - if input.cardanoMetaDataBool: - if not os.path.exists(cardanoMetadataPath): - os.makedirs(cardanoMetadataPath) - createCardanoMetadata(name, Order_Num, full_single_dna, dnaDictionary, metadataMaterialDict, - input.custom_Fields, - input.enableCustomFields, input.cardano_description, cardanoMetadataPath) - - if input.solanaMetaDataBool: - if not os.path.exists(solanaMetadataPath): - os.makedirs(solanaMetadataPath) - createSolanaMetaData(name, Order_Num, full_single_dna, dnaDictionary, metadataMaterialDict, - input.custom_Fields, - input.enableCustomFields, input.solana_description, solanaMetadataPath) - - if input.erc721MetaData: - if not os.path.exists(erc721MetadataPath): - os.makedirs(erc721MetadataPath) - createErc721MetaData(name, Order_Num, full_single_dna, 
dnaDictionary, metadataMaterialDict, - input.custom_Fields, - input.enableCustomFields, input.erc721_description, erc721MetadataPath) - - if not os.path.exists(BMNFT_metaData_Folder): - os.makedirs(BMNFT_metaData_Folder) - - for b in dnaDictionary: - if dnaDictionary[b] == "0": - dnaDictionary[b] = "Empty" - - metaDataDict = {"name": name, "NFT_DNA": a, "NFT_Variants": dnaDictionary, - "Material_Attributes": metadataMaterialDict} - - jsonMetaData = json.dumps(metaDataDict, indent=1, ensure_ascii=True) - - with open(os.path.join(BMNFT_metaData_Folder, "Data_" + name + ".json"), 'w') as outfile: - outfile.write(jsonMetaData + '\n') - - print(f"Completed {name} render in {time.time() - time_start_2}s") - - save_completed(full_single_dna, a, x, input.batch_json_save_path, input.batchToGenerate) - - x += 1 - - for i in hierarchy: - for j in hierarchy[i]: - bpy.data.collections[j].hide_render = False - bpy.data.collections[j].hide_viewport = False - - batch_complete_time = time.time() - time_start_1 - - print(f"\nAll NFTs successfully generated and sent to {input.nftBatch_save_path}" - f"\nCompleted all renders in Batch{input.batchToGenerate}.json in {batch_complete_time}s\n") - - batch_info = {"Batch Render Time": batch_complete_time, "Number of NFTs generated in Batch": x - 1, - "Average time per generation": batch_complete_time / x - 1} - - batch_infoFolder = os.path.join(input.nftBatch_save_path, "Batch" + str(input.batchToGenerate), "batch_info.json") - save_batch(batch_info, batch_infoFolder) - - # Send Email that Batch is complete: - if input.emailNotificationBool: - port = 465 # For SSL - smtp_server = "smtp.gmail.com" - sender_email = input.sender_from # Enter your address - receiver_email = input.receiver_to # Enter receiver address - password = input.email_password - - # Get batch info for message: - if input.fail_state: - batch = input.fail_state - batchData = getBatchData(input.failed_batch, input.batch_json_save_path) - - else: - batchData = 
getBatchData(input.batchToGenerate, input.batch_json_save_path) - - batch = input.batchToGenerate - - generation_time = str(datetime.timedelta(seconds=batch_complete_time)) - - message = f"""\ - Subject: Batch {batch} completed {x - 1} NFTs in {generation_time} (h:m:s) - - Generation Time: - {generation_time.split(':')[0]} Hours, {generation_time.split(':')[1]} Minutes, {generation_time.split(':')[2]} Seconds - Batch Data: - - {batchData} - - This message was sent from an instance of the Blend_My_NFTs Blender add-on. - """ - - context = ssl.create_default_context() - with smtplib.SMTP_SSL(smtp_server, port, context=context) as server: - server.login(sender_email, password) - server.sendmail(sender_email, receiver_email, message) - - # Automatic Shutdown: - # If user selects automatic shutdown but did not specify time after Batch completion - def shutdown(time): - plateform = platform.system() - - if plateform == "Windows": - os.system(f"shutdown /s /t {time}") - if plateform == "Darwin": - os.system(f"shutdown /s /t {time}") - - if input.enableAutoShutdown and not input.specify_timeBool: - shutdown(0) - - # If user selects automatic shutdown and specify time after Batch completion - if input.enableAutoShutdown and input.specify_timeBool: - hours = (int(input.hours) / 60) / 60 - minutes = int(input.minutes) / 60 - total_sleep_time = hours + minutes - - # time.sleep(total_sleep_time) - - shutdown(total_sleep_time) diff --git a/main/DNA_Generator.py b/main/dna_generator.py similarity index 92% rename from main/DNA_Generator.py rename to main/dna_generator.py index 6d62d87..289d56b 100644 --- a/main/DNA_Generator.py +++ b/main/dna_generator.py @@ -7,8 +7,8 @@ import json import random import traceback from functools import partial -from . import Logic, Material_Generator, Helpers -from .Helpers import TextColors +from . 
import logic, material_generator, helpers +from .helpers import TextColors def generate_nft_dna( @@ -24,7 +24,7 @@ def generate_nft_dna( Returns batchDataDictionary containing the number of NFT combinations, hierarchy, and the dna_list. """ - hierarchy = Helpers.get_hierarchy() + hierarchy = helpers.get_hierarchy() # DNA random, Rarity and Logic methods: data_dictionary = {} @@ -110,11 +110,11 @@ def generate_nft_dna( # print(f"Rarity DNA: {single_dna}") if enable_logic: - single_dna = Logic.logicafyDNAsingle(hierarchy, single_dna, logic_file, enable_rarity, enable_materials) + single_dna = logic.logicafyDNAsingle(hierarchy, single_dna, logic_file, enable_rarity, enable_materials) # print(f"Logic DNA: {single_dna}") if enable_materials: - single_dna = Material_Generator.apply_materials(hierarchy, single_dna, materials_file, enable_rarity) + single_dna = material_generator.apply_materials(hierarchy, single_dna, materials_file, enable_rarity) # print(f"Materials DNA: {single_dna}") # print("============\n") @@ -153,7 +153,7 @@ def generate_nft_dna( # Messages: - Helpers.raise_warning_collection_size(dna_list, collection_size) + helpers.raise_warning_collection_size(dna_list, collection_size) # Data stored in batchDataDictionary: data_dictionary["num_nfts_generated"] = len(dna_list) @@ -244,7 +244,7 @@ def send_to_record( """ # Checking Scene is compatible with BMNFTs: - Helpers.check_scene() + helpers.check_scene() # Messages: print( @@ -285,12 +285,12 @@ def send_to_record( nft_record_save_path = os.path.join(blend_my_nfts_output, "NFTRecord.json") # Checks: - Helpers.raise_warning_max_nfts(nfts_per_batch, collection_size) - Helpers.check_duplicates(data_dictionary["dna_list"]) - Helpers.raise_error_zero_combinations() + helpers.raise_warning_max_nfts(nfts_per_batch, collection_size) + helpers.check_duplicates(data_dictionary["dna_list"]) + helpers.raise_error_zero_combinations() if enable_rarity: - Helpers.check_rarity(data_dictionary["hierarchy"], 
data_dictionary["dna_list"], + helpers.check_rarity(data_dictionary["hierarchy"], data_dictionary["dna_list"], os.path.join(save_path, "Blend_My_NFTs Output/NFT_Data")) except FileNotFoundError: @@ -316,15 +316,15 @@ def send_to_record( except Exception: traceback.print_exc() raise ( - f"\n{Helpers.TextColors.ERROR}Blend_My_NFTs Error:\n" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows " f"the naming conventions and scene structure. For more information, " - f"see:\n{Helpers.TextColors.RESET}" + f"see:\n{TextColors.RESET}" f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) # Loading Animation: - loading = Helpers.Loader(f'Creating NFT DNA...', '').start() + loading = helpers.Loader(f'Creating NFT DNA...', '').start() create_nft_data() make_batches(collection_size, nfts_per_batch, save_path, batch_json_save_path) loading.stop() @@ -332,5 +332,5 @@ def send_to_record( time_end = time.time() print( - f"{Helpers.TextColors.OK}Created and saved NFT DNA in {time_end - time_start}s.\n{Helpers.TextColors.RESET}" + f"{TextColors.OK}Created and saved NFT DNA in {time_end - time_start}s.\n{TextColors.RESET}" ) diff --git a/main/exporter.py b/main/exporter.py new file mode 100644 index 0000000..0c640e0 --- /dev/null +++ b/main/exporter.py @@ -0,0 +1,641 @@ +# Purpose: +# This file takes a given Batch created by dna_generator.py and tells blender to render the image or export a 3D model +# to the NFT_Output folder. 
+ +import bpy +import os +import ssl +import time +import json +import smtplib +import datetime +import platform + +from .helpers import TextColors, Loader +from .metadata_templates import createCardanoMetadata, createSolanaMetaData, createErc721MetaData + + +# Save info +def save_batch(batch, file_name): + saved_batch = json.dumps(batch, indent=1, ensure_ascii=True) + + with open(os.path.join(file_name), 'w') as outfile: + outfile.write(saved_batch + '\n') + + +def save_generation_state(input): + """ + Saves date and time of generation start, and generation types; Images, Animations, 3D Models, and the file types for + each. + """ + file_name = os.path.join(input.batch_json_save_path, "Batch{}.json".format(input.batchToGenerate)) + batch = json.load(open(file_name)) + + current_time = datetime.datetime.now().strftime("%H:%M:%S") + current_date = datetime.datetime.now().strftime("%d/%m/%Y") + local_timezone = str(datetime.datetime.now(datetime.timezone.utc)) + + if "Generation Save" in batch: + batch_save_number = int(batch[f"Generation Save"].index(batch[f"Generation Save"][-1])) + else: + batch_save_number = 0 + + batch["Generation Save"] = list() + batch["Generation Save"].append({ + "Batch Save Number": batch_save_number + 1, + "DNA Generated": None, + "Generation Start Date and Time": [current_time, current_date, local_timezone], + "Render_Settings": { + "nftName": input.nftName, + "save_path": input.save_path, + "nftsPerBatch": input.nftsPerBatch, + "batch_to_generate": input.batchToGenerate, + "collectionSize": input.collectionSize, + + "Blend_My_NFTs_Output": input.Blend_My_NFTs_Output, + "batch_json_save_path": input.batch_json_save_path, + "nftBatch_save_path": input.nftBatch_save_path, + + "enableImages": input.enableImages, + "imageFileFormat": input.imageFileFormat, + + "enableAnimations": input.enableAnimations, + "animationFileFormat": input.animationFileFormat, + + "enableModelsBlender": input.enableModelsBlender, + "modelFileFormat": 
input.modelFileFormat, + + "enableCustomFields": input.enableCustomFields, + + "cardanoMetaDataBool": input.cardanoMetaDataBool, + "solanaMetaDataBool": input.solanaMetaDataBool, + "erc721MetaData": input.erc721MetaData, + + "cardano_description": input.cardano_description, + "solana_description": input.solana_description, + "erc721_description": input.erc721_description, + + "enableMaterials": input.enableMaterials, + "materialsFile": input.materialsFile, + + "enableLogic": input.enableLogic, + "enable_Logic_Json": input.enable_Logic_Json, + "logicFile": input.logicFile, + + "enableRarity": input.enableRarity, + + "enableAutoShutdown": input.enableAutoShutdown, + + "specify_timeBool": input.specify_timeBool, + "hours": input.hours, + "minutes": input.minutes, + + "emailNotificationBool": input.emailNotificationBool, + "sender_from": input.sender_from, + "email_password": input.email_password, + "receiver_to": input.receiver_to, + + "custom_Fields": input.custom_Fields, + }, + }) + + save_batch(batch, file_name) + + +def save_completed(full_single_dna, a, x, batch_json_save_path, batch_to_generate): + """Saves progress of rendering to batch.json file.""" + + file_name = os.path.join(batch_json_save_path, "Batch{}.json".format(batch_to_generate)) + batch = json.load(open(file_name)) + index = batch["BatchDNAList"].index(a) + batch["BatchDNAList"][index][full_single_dna]["Complete"] = True + batch["Generation Save"][-1]["DNA Generated"] = x + + save_batch(batch, file_name) + + +# Exporter functions: +def get_batch_data(batch_to_generate, batch_json_save_path): + """ + Retrieves a given batches data determined by renderBatch in config.py + """ + + file_name = os.path.join(batch_json_save_path, "Batch{}.json".format(batch_to_generate)) + batch = json.load(open(file_name)) + + nfts_in_batch = batch["nfts_in_batch"] + hierarchy = batch["hierarchy"] + batch_dna_list = batch["batch_dna_list"] + + return nfts_in_batch, hierarchy, batch_dna_list + + +def 
render_and_save_nfts(input): + """ + Renders the NFT DNA in a Batch#.json, where # is renderBatch in config.py. Turns off the viewport camera and + the render camera for all items in hierarchy. + """ + + time_start_1 = time.time() + + # If failed Batch is detected and user is resuming its generation: + if input.fail_state: + print(f"{TextColors.ERROR}\nResuming Batch #{input.failed_batch}\n{TextColors.RESET}") + nfts_in_batch, hierarchy, batch_dna_list = get_batch_data(input.failed_batch, input.batch_json_save_path) + for a in range(input.failed_dna): + del batch_dna_list[0] + x = input.failed_dna + 1 + + # If user is generating the normal way: + else: + print(f"\nGenerating Batch #{input.batchToGenerate}\n") + nfts_in_batch, hierarchy, batch_dna_list = get_batch_data(input.batchToGenerate, input.batch_json_save_path) + save_generation_state(input) + x = 1 + + if input.enableMaterials: + materials_file = json.load(open(input.materialsFile)) + + for a in batch_dna_list: + full_single_dna = list(a.keys())[0] + order_num = a[full_single_dna]['order_num'] + + # Material handling: + if input.enableMaterials: + single_dna, material_dna = full_single_dna.split(':') + + if not input.enableMaterials: + single_dna = full_single_dna + + def match_dna_to_variant(single_dna): + """ + Matches each DNA number separated by "-" to its attribute, then its variant. + """ + + list_attributes = list(hierarchy.keys()) + list_dna_deconstructed = single_dna.split('-') + dna_dictionary = {} + + for i, j in zip(list_attributes, list_dna_deconstructed): + dna_dictionary[i] = j + + for x in dna_dictionary: + for k in hierarchy[x]: + k_num = hierarchy[x][k]["number"] + if k_num == dna_dictionary[x]: + dna_dictionary.update({x: k}) + return dna_dictionary + + def match_material_dna_to_material(single_dna, material_dna, materials_file): + """ + Matches the Material DNA to it's selected Materials unless a 0 is present meaning no material for that variant was selected. 
+ """ + list_attributes = list(hierarchy.keys()) + list_dna_deconstructed = single_dna.split('-') + list_material_dna_deconstructed = material_dna.split('-') + + full_dna_dict = {} + + for attribute, variant, material in zip( + list_attributes, + list_dna_deconstructed, + list_material_dna_deconstructed + ): + + for var in hierarchy[attribute]: + if hierarchy[attribute][var]['number'] == variant: + variant = var + + if material != '0': # If material is not empty + for variant_m in materials_file: + if variant == variant_m: + # Getting Materials name from Materials index in the Materials List + materials_list = list(materials_file[variant_m]["Material List"].keys()) + + material = materials_list[int(material) - 1] # Subtract 1 because '0' means empty mat + break + + full_dna_dict[variant] = material + + return full_dna_dict + + metadata_material_dict = {} + + if input.enableMaterials: + material_dna_dictionary = match_material_dna_to_material(single_dna, material_dna, materials_file) + + for var_mat in list(material_dna_dictionary.keys()): + if material_dna_dictionary[var_mat]!='0': + if not materials_file[var_mat]['Variant Objects']: + """ + If objects to apply material to not specified, apply to all objects in Variant collection. + """ + metadata_material_dict[var_mat] = material_dna_dictionary[var_mat] + + for obj in bpy.data.collections[var_mat].all_objects: + selected_object = bpy.data.objects.get(obj.name) + selected_object.active_material = bpy.data.materials[material_dna_dictionary[var_mat]] + + if materials_file[var_mat]['Variant Objects']: + """ + If objects to apply material to are specified, apply material only to objects specified withing + the Variant collection. 
+ """ + metadata_material_dict[var_mat] = material_dna_dictionary[var_mat] + + for obj in materials_file[var_mat]['Variant Objects']: + selected_object = bpy.data.objects.get(obj) + selected_object.active_material = bpy.data.materials[material_dna_dictionary[var_mat]] + + # Turn off render camera and viewport camera for all collections in hierarchy + for i in hierarchy: + for j in hierarchy[i]: + try: + bpy.data.collections[j].hide_render = True + bpy.data.collections[j].hide_viewport = True + except KeyError: + raise TypeError( + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"The Collection '{j}' appears to be missing or has been renamed. If you made any changes to " + f"your .blend file scene, ensure you re-create your NFT Data so Blend_My_NFTs can read your " + f"scene. For more information see:{TextColors.RESET}" + f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + ) + + dna_dictionary = match_dna_to_variant(single_dna) + name = input.nftName + "_" + str(order_num) + + # Change Text Object in Scene to match DNA string: + # Variables that can be used: full_single_dna, name, order_num + # ob = bpy.data.objects['Text'] # Object name + # ob.data.body = str(f"DNA: {full_single_dna}") # Set text of Text Object ob + + print(f"\n{TextColors.OK}======== Generating NFT {x}/{nfts_in_batch}: {name} ========{TextColors.RESET}") + print(f"\nVariants selected:") + print(f"{dna_dictionary}") + if input.enableMaterials: + print(f"\nMaterials selected:") + print(f"{material_dna_dictionary}") + + print(f"\nDNA Code:{full_single_dna}") + + for c in dna_dictionary: + collection = dna_dictionary[c] + if collection != '0': + bpy.data.collections[collection].hide_render = False + bpy.data.collections[collection].hide_viewport = False + + time_start_2 = time.time() + + # Main paths for batch sub-folders: + batch_folder = os.path.join(input.nftBatch_save_path, "Batch" + str(input.batchToGenerate)) + + image_folder = os.path.join(batch_folder, 
"Images") + animation_folder = os.path.join(batch_folder, "Animations") + model_folder = os.path.join(batch_folder, "Models") + bmnft_data_folder = os.path.join(batch_folder, "BMNFT_data") + + image_path = os.path.join(image_folder, name) + animation_path = os.path.join(animation_folder, name) + model_path = os.path.join(model_folder, name) + + cardano_metadata_path = os.path.join(batch_folder, "Cardano_metadata") + solana_metadata_path = os.path.join(batch_folder, "Solana_metadata") + erc721_metadata_path = os.path.join(batch_folder, "Erc721_metadata") + + def check_failed_exists(file_path): + """ + Delete a file if a fail state is detected and if the file being re-generated already exists. Prevents + animations from corrupting. + """ + if input.fail_state: + if os.path.exists(file_path): + os.remove(file_path) + + # Generation/Rendering: + if input.enableImages: + + print(f"{TextColors.OK}-------- Image --------{TextColors.RESET}") + + image_render_time_start = time.time() + + check_failed_exists(image_path) + + def render_image(): + if not os.path.exists(image_folder): + os.makedirs(image_folder) + + bpy.context.scene.render.filepath = image_path + bpy.context.scene.render.image_settings.file_format = input.imageFileFormat + bpy.ops.render.render(write_still=True) + + # Loading Animation: + loading = Loader(f'Rendering Image {x}/{nfts_in_batch}...', '').start() + render_image() + loading.stop() + + image_render_time_end = time.time() + + print( + f"{TextColors.OK}Rendered image in {image_render_time_end - image_render_time_start}s." 
+ f"\n{TextColors.RESET}" + ) + + if input.enableAnimations: + print(f"{TextColors.OK}-------- Animation --------{TextColors.RESET}") + + animation_render_time_start = time.time() + + check_failed_exists(animation_path) + + def render_animation(): + if not os.path.exists(animation_folder): + os.makedirs(animation_folder) + + if input.animationFileFormat == "MP4": + bpy.context.scene.render.filepath = animation_path + bpy.context.scene.render.image_settings.file_format = "FFMPEG" + + bpy.context.scene.render.ffmpeg.format = 'MPEG4' + bpy.context.scene.render.ffmpeg.codec = 'H264' + bpy.ops.render.render(animation=True) + + elif input.animationFileFormat == 'PNG': + if not os.path.exists(animation_path): + os.makedirs(animation_path) + + bpy.context.scene.render.filepath = os.path.join(animation_path, name) + bpy.context.scene.render.image_settings.file_format = input.animationFileFormat + bpy.ops.render.render(animation=True) + + elif input.animationFileFormat == 'TIFF': + if not os.path.exists(animation_path): + os.makedirs(animation_path) + + bpy.context.scene.render.filepath = os.path.join(animation_path, name) + bpy.context.scene.render.image_settings.file_format = input.animationFileFormat + bpy.ops.render.render(animation=True) + + else: + bpy.context.scene.render.filepath = animation_path + bpy.context.scene.render.image_settings.file_format = input.animationFileFormat + bpy.ops.render.render(animation=True) + + # Loading Animation: + loading = Loader(f'Rendering Animation {x}/{nfts_in_batch}...', '').start() + render_animation() + loading.stop() + + animation_render_time_end = time.time() + + print( + f"{TextColors.OK}Rendered animation in {animation_render_time_end - animation_render_time_start}s." 
+ f"\n{TextColors.RESET}" + ) + + if input.enableModelsBlender: + print(f"{TextColors.OK}-------- 3D Model --------{TextColors.RESET}") + + model_generation_time_start = time.time() + + def generate_models(): + if not os.path.exists(model_folder): + os.makedirs(model_folder) + + for i in dna_dictionary: + coll = dna_dictionary[i] + if coll != '0': + for obj in bpy.data.collections[coll].all_objects: + obj.select_set(True) + + for obj in bpy.data.collections['Script_Ignore'].all_objects: + obj.select_set(True) + + # Remove objects from 3D model export: + # remove_objects: list = [ + # ] + # + # for obj in bpy.data.objects: + # if obj.name in remove_objects: + # obj.select_set(False) + + if input.modelFileFormat == 'GLB': + check_failed_exists(f"{model_path}.glb") + bpy.ops.export_scene.gltf( + filepath=f"{model_path}.glb", + check_existing=True, + export_format='GLB', + export_keep_originals=True, + use_selection=True + ) + if input.modelFileFormat == 'GLTF_SEPARATE': + check_failed_exists(f"{model_path}.gltf") + check_failed_exists(f"{model_path}.bin") + bpy.ops.export_scene.gltf( + filepath=f"{model_path}", + check_existing=True, + export_format='GLTF_SEPARATE', + export_keep_originals=True, + use_selection=True + ) + if input.modelFileFormat == 'GLTF_EMBEDDED': + check_failed_exists(f"{model_path}.gltf") + bpy.ops.export_scene.gltf( + filepath=f"{model_path}.gltf", + check_existing=True, + export_format='GLTF_EMBEDDED', + export_keep_originals=True, + use_selection=True + ) + elif input.modelFileFormat == 'FBX': + check_failed_exists(f"{model_path}.fbx") + bpy.ops.export_scene.fbx( + filepath=f"{model_path}.fbx", + check_existing=True, + use_selection=True + ) + elif input.modelFileFormat == 'OBJ': + check_failed_exists(f"{model_path}.obj") + bpy.ops.export_scene.obj( + filepath=f"{model_path}.obj", + check_existing=True, + use_selection=True, + ) + elif input.modelFileFormat == 'X3D': + check_failed_exists(f"{model_path}.x3d") + bpy.ops.export_scene.x3d( + 
filepath=f"{model_path}.x3d", + check_existing=True, + use_selection=True + ) + elif input.modelFileFormat == 'STL': + check_failed_exists(f"{model_path}.stl") + bpy.ops.export_mesh.stl( + filepath=f"{model_path}.stl", + check_existing=True, + use_selection=True + ) + elif input.modelFileFormat == 'VOX': + check_failed_exists(f"{model_path}.vox") + bpy.ops.export_vox.some_data(filepath=f"{model_path}.vox") + + # Loading Animation: + loading = Loader(f'Generating 3D model {x}/{nfts_in_batch}...', '').start() + generate_models() + loading.stop() + + model_generation_time_end = time.time() + + print( + f"{TextColors.OK}Generated 3D model in {model_generation_time_end - model_generation_time_start}s." + f"\n{TextColors.RESET}" + ) + + # Generating Metadata: + if input.cardanoMetaDataBool: + if not os.path.exists(cardano_metadata_path): + os.makedirs(cardano_metadata_path) + createCardanoMetadata( + name, + order_num, + full_single_dna, + dna_dictionary, + metadata_material_dict, + input.custom_Fields, + input.enableCustomFields, + input.cardano_description, + cardano_metadata_path + ) + + if input.solanaMetaDataBool: + if not os.path.exists(solana_metadata_path): + os.makedirs(solana_metadata_path) + createSolanaMetaData( + name, + order_num, + full_single_dna, + dna_dictionary, + metadata_material_dict, + input.custom_Fields, + input.enableCustomFields, + input.solana_description, + solana_metadata_path + ) + + if input.erc721MetaData: + if not os.path.exists(erc721_metadata_path): + os.makedirs(erc721_metadata_path) + createErc721MetaData( + name, + order_num, + full_single_dna, + dna_dictionary, + metadata_material_dict, + input.custom_Fields, + input.enableCustomFields, + input.erc721_description, + erc721_metadata_path + ) + + if not os.path.exists(bmnft_data_folder): + os.makedirs(bmnft_data_folder) + + for b in dna_dictionary: + if dna_dictionary[b] == "0": + dna_dictionary[b] = "Empty" + + meta_data_dict = { + "name": name, + "nft_dna": a, + "nft_variants": 
dna_dictionary, + "material_attributes": metadata_material_dict + } + + json_meta_data = json.dumps(meta_data_dict, indent=1, ensure_ascii=True) + + with open(os.path.join(bmnft_data_folder, "Data_" + name + ".json"), 'w') as outfile: + outfile.write(json_meta_data + '\n') + + print(f"Completed {name} render in {time.time() - time_start_2}s") + + save_completed(full_single_dna, a, x, input.batch_json_save_path, input.batchToGenerate) + + x += 1 + + for i in hierarchy: + for j in hierarchy[i]: + bpy.data.collections[j].hide_render = False + bpy.data.collections[j].hide_viewport = False + + batch_complete_time = time.time() - time_start_1 + + print(f"\nAll NFTs successfully generated and sent to {input.nftBatch_save_path}" + f"\nCompleted all renders in Batch{input.batchToGenerate}.json in {batch_complete_time}s\n") + + batch_info = {"Batch Render Time": batch_complete_time, "Number of NFTs generated in Batch": x - 1, + "Average time per generation": batch_complete_time / x - 1} + + batch_info_folder = os.path.join(input.nftBatch_save_path, "Batch" + str(input.batchToGenerate), "batch_info.json") + save_batch(batch_info, batch_info_folder) + + # Send Email that Batch is complete: + if input.emailNotificationBool: + port = 465 # For SSL + smtp_server = "smtp.gmail.com" + sender_email = input.sender_from # Enter your address + receiver_email = input.receiver_to # Enter receiver address + password = input.email_password + + # Get batch info for message: + if input.fail_state: + batch = input.fail_state + batch_data = get_batch_data(input.failed_batch, input.batch_json_save_path) + + else: + batch_data = get_batch_data(input.batchToGenerate, input.batch_json_save_path) + + batch = input.batchToGenerate + + generation_time = str(datetime.timedelta(seconds=batch_complete_time)) + + message = f"""\ + Subject: Batch {batch} completed {x - 1} NFTs in {generation_time} (h:m:s) + + Generation Time: + {generation_time.split(':')[0]} Hours, + {generation_time.split(':')[1]} 
Minutes, + {generation_time.split(':')[2]} Seconds + Batch Data: + + {batch_data} + + This message was sent from an instance of the Blend_My_NFTs Blender add-on. + """ + + context = ssl.create_default_context() + with smtplib.SMTP_SSL(smtp_server, port, context=context) as server: + server.login(sender_email, password) + server.sendmail(sender_email, receiver_email, message) + + # Automatic Shutdown: + # If user selects automatic shutdown but did not specify time after Batch completion + def shutdown(time): + plateform = platform.system() + + if plateform == "Windows": + os.system(f"shutdown /s /t {time}") + if plateform == "Darwin": + os.system(f"shutdown /s /t {time}") + + if input.enableAutoShutdown and not input.specify_timeBool: + shutdown(0) + + # If user selects automatic shutdown and specify time after Batch completion + if input.enableAutoShutdown and input.specify_timeBool: + hours = (int(input.hours) / 60) / 60 + minutes = int(input.minutes) / 60 + total_sleep_time = hours + minutes + + # time.sleep(total_sleep_time) + + shutdown(total_sleep_time) diff --git a/main/HeadlessUtil.py b/main/headless_util.py similarity index 91% rename from main/HeadlessUtil.py rename to main/headless_util.py index 6b09303..6c4b283 100644 --- a/main/HeadlessUtil.py +++ b/main/headless_util.py @@ -1,6 +1,6 @@ -#adding CLI arguments -#Used this as a basis: -#https://developer.blender.org/diffusion/B/browse/master/release/scripts/templates_py/background_job.py +# adding CLI arguments +# Used this as a basis: +# https://developer.blender.org/diffusion/B/browse/master/release/scripts/templates_py/background_job.py import sys import argparse diff --git a/main/Helpers.py b/main/helpers.py similarity index 100% rename from main/Helpers.py rename to main/helpers.py diff --git a/main/Intermediate.py b/main/intermediate.py similarity index 95% rename from main/Intermediate.py rename to main/intermediate.py index 1dd6f43..323bf56 100644 --- a/main/Intermediate.py +++ 
b/main/intermediate.py @@ -1,7 +1,7 @@ import json import bpy -from main import DNA_Generator, Exporter +from main import dna_generator, exporter def send_To_Record_JSON(input, reverse_order=False): @@ -44,7 +44,7 @@ def send_To_Record_JSON(input, reverse_order=False): num += 1 - DNA_Generator.send_to_record(input.collectionSize, + dna_generator.send_to_record(input.collectionSize, input.nftsPerBatch, input.save_path, input.enableRarity, @@ -77,4 +77,4 @@ def render_and_save_NFTs(input, reverse_order=False): else: input.custom_Fields[item.field_name] = item.field_value - Exporter.render_and_save_NFTs(input) + Exporter.render_and_save_nfts(input) diff --git a/main/Logic.py b/main/logic.py similarity index 98% rename from main/Logic.py rename to main/logic.py index 2cc316d..d76f6a4 100644 --- a/main/Logic.py +++ b/main/logic.py @@ -1,11 +1,11 @@ # Purpose: -# The purpose of this file is to add logic and rules to the DNA that are sent to the NFTRecord.json file in DNA_Generator.py +# The purpose of this file is to add logic and rules to the DNA that are sent to the NFTRecord.json file in dna_generator.py import bpy import random import collections -from .Helpers import TextColors, removeList, remove_file_by_extension, save_result +from .helpers import TextColors, removeList, remove_file_by_extension, save_result def reconstructDNA(deconstructedDNA): diff --git a/main/Material_Generator.py b/main/material_generator.py similarity index 98% rename from main/Material_Generator.py rename to main/material_generator.py index 8c52f35..e0eda71 100644 --- a/main/Material_Generator.py +++ b/main/material_generator.py @@ -7,7 +7,7 @@ import bpy import json import random -from .Helpers import TextColors +from .helpers import TextColors def select_material(materialList, variant, enableRarity): diff --git a/main/Metadata.py b/main/metadata_templates.py similarity index 98% rename from main/Metadata.py rename to main/metadata_templates.py index 618e028..2d7b8ca 100644 --- 
a/main/Metadata.py +++ b/main/metadata_templates.py @@ -3,7 +3,7 @@ # https://discord.gg/QTT7dzcuVs # Purpose: -# This file returns the specified meta data format to the Exporter.py for a given NFT DNA. +# This file returns the specified meta data format to the exporter.py for a given NFT DNA. import bpy import os diff --git a/main/Refactorer.py b/main/refactorer.py similarity index 96% rename from main/Refactorer.py rename to main/refactorer.py index 0faa07f..d842e05 100644 --- a/main/Refactorer.py +++ b/main/refactorer.py @@ -6,7 +6,7 @@ import os import json import shutil -from .Helpers import TextColors, removeList, remove_file_by_extension +from .helpers import TextColors, removeList, remove_file_by_extension def reformatNFTCollection(refactor_panel_input): From 57ec4b1ac1200c44f292e99aa02e7ed9f781c5a9 Mon Sep 17 00:00:00 2001 From: Torrin Leonard <82110564+torrinworx@users.noreply.github.com> Date: Wed, 24 Aug 2022 13:04:20 -0400 Subject: [PATCH 08/12] Pep8 formatting and renaming key variables --- UILists/custom_metadata_ui_list.py | 2 + UILists/logic_ui_list.py | 1 + __init__.py | 492 +++++++++++++++-------------- main/dna_generator.py | 4 +- main/exporter.py | 152 ++++----- main/headless_util.py | 7 +- main/helpers.py | 4 +- main/intermediate.py | 71 +++-- main/logic.py | 155 ++++----- main/material_generator.py | 118 +++---- main/metadata_templates.py | 142 ++++++--- main/refactorer.py | 36 +-- 12 files changed, 628 insertions(+), 556 deletions(-) diff --git a/UILists/custom_metadata_ui_list.py b/UILists/custom_metadata_ui_list.py index db88f25..54b4575 100644 --- a/UILists/custom_metadata_ui_list.py +++ b/UILists/custom_metadata_ui_list.py @@ -11,6 +11,7 @@ from bpy.types import (Operator, PropertyGroup, UIList) + # ======== Operators ======== # class CUSTOM_OT_custom_metadata_fields_actions(Operator): """Move items up and down, add and remove""" @@ -103,6 +104,7 @@ class CUSTOM_UL_custom_metadata_fields_items(UIList): def invoke(self, context, event): 
pass + # ======== Property Collection ======== # class CUSTOM_custom_metadata_fields_objectCollection(PropertyGroup): # name: StringProperty() -> Instantiated by default diff --git a/UILists/logic_ui_list.py b/UILists/logic_ui_list.py index 0f82f42..43c2f7f 100644 --- a/UILists/logic_ui_list.py +++ b/UILists/logic_ui_list.py @@ -11,6 +11,7 @@ from bpy.types import (Operator, PropertyGroup, UIList) + # ======== Operators ======== # class CUSTOM_OT_logic_actions(Operator): """Move items up and down, add and remove""" diff --git a/__init__.py b/__init__.py index 7de5fdb..640b43a 100644 --- a/__init__.py +++ b/__init__.py @@ -1,18 +1,19 @@ bl_info = { "name": "Blend_My_NFTs", - "author": "Torrin Leonard, This Cozy Studio Inc", - "version": (4, 5, 0), + "author": "Torrin Leonard, This Cozy Studio Inc.", + "version": (4, 5, 1), "blender": (3, 2, 2), "location": "View3D", - "description": "A free and opensource Blender add-on that enables you to create thousands of unique images, animations, and 3D models.", + "description": "A free and opensource Blender add-on that enables you to create thousands of unique images, " + "animations, and 3D models.", "support": "COMMUNITY", "doc_url": "https://github.com/torrinworx/Blend_My_NFTs", "tracker_url": "https://github.com/torrinworx/Blend_My_NFTs/issues/new", "category": "Development", } -BMNFTS_VERSION = "v4.5.0" -LAST_UPDATED = "12:34AM, Aug 11th, 2022" +BMNFTS_VERSION = "v4.5.1" +LAST_UPDATED = "01:02PM, Aug 24th, 2022" # ======== Import handling ======== # @@ -111,64 +112,64 @@ bpy.app.handlers.depsgraph_update_post.append(Refresh_UI) # ======== Defining BMNFTs Data ======== # @dataclass class BMNFTData: - nftName: str + nft_name: str save_path: str - nftsPerBatch: int - batchToGenerate: int - collectionSize: int + nfts_per_batch: int + batch_to_generate: int + collection_size: int - Blend_My_NFTs_Output: str + blend_my_nfts_output: str batch_json_save_path: str - nftBatch_save_path: str + nft_batch_save_path: str - 
enableImages: bool - imageFileFormat: str + enable_images: bool + image_file_format: str - enableAnimations: bool - animationFileFormat: str + enable_animations: bool + animation_file_format: str - enableModelsBlender: bool - modelFileFormat: str + enable_models: bool + model_file_format: str - enableCustomFields: bool + enable_custom_fields: bool - cardanoMetaDataBool: bool - solanaMetaDataBool: bool - erc721MetaData: bool + cardano_metadata_bool: bool + solana_metadata_bool: bool + erc721_metadata: bool cardano_description: str solana_description: str erc721_description: str - enableMaterials: bool - materialsFile: str + enable_materials: bool + materials_file: str - enableLogic: bool - enable_Logic_Json: bool - logicFile: str + enable_logic: bool + enable_logic_json: bool + logic_file: str - enableRarity: bool + enable_rarity: bool - enableAutoShutdown: bool + enable_auto_shutdown: bool - specify_timeBool: bool + specify_time_bool: bool hours: int minutes: int - emailNotificationBool: bool + email_notification_bool: bool sender_from: str email_password: str receiver_to: str enable_debug: bool - custom_Fields: dict = None + custom_fields: dict = None fail_state: Any = False failed_batch: Any = None failed_dna: Any = None failed_dna_index: Any = None def __post_init__(self): - self.custom_Fields = {} + self.custom_fields = {} def getBMNFTData(): @@ -176,51 +177,51 @@ def getBMNFTData(): _Blend_My_NFTs_Output, _batch_json_save_path, _nftBatch_save_path = make_directories(_save_path) data = BMNFTData( - nftName=bpy.context.scene.input_tool.nftName, + nft_name=bpy.context.scene.input_tool.nft_name, save_path=_save_path, - nftsPerBatch=bpy.context.scene.input_tool.nftsPerBatch, - batchToGenerate=bpy.context.scene.input_tool.batchToGenerate, - collectionSize=bpy.context.scene.input_tool.collectionSize, + nfts_per_batch=bpy.context.scene.input_tool.nfts_per_batch, + batch_to_generate=bpy.context.scene.input_tool.batch_to_generate, + 
collection_size=bpy.context.scene.input_tool.collection_size, - enableRarity=bpy.context.scene.input_tool.enableRarity, + enable_rarity=bpy.context.scene.input_tool.enable_rarity, - Blend_My_NFTs_Output=_Blend_My_NFTs_Output, + blend_my_nfts_output=_Blend_My_NFTs_Output, batch_json_save_path=_batch_json_save_path, - nftBatch_save_path=_nftBatch_save_path, + nft_batch_save_path=_nftBatch_save_path, - enableLogic=bpy.context.scene.input_tool.enableLogic, - enable_Logic_Json=bpy.context.scene.input_tool.enable_Logic_Json, - logicFile=bpy.context.scene.input_tool.logicFile, + enable_logic=bpy.context.scene.input_tool.enable_logic, + enable_logic_json=bpy.context.scene.input_tool.enable_logic_json, + logic_file=bpy.context.scene.input_tool.logic_file, - enableImages=bpy.context.scene.input_tool.imageBool, - imageFileFormat=bpy.context.scene.input_tool.imageEnum, + enable_images=bpy.context.scene.input_tool.image_bool, + image_file_format=bpy.context.scene.input_tool.image_enum, - enableAnimations=bpy.context.scene.input_tool.animationBool, - animationFileFormat=bpy.context.scene.input_tool.animationEnum, + enable_animations=bpy.context.scene.input_tool.animation_bool, + animation_file_format=bpy.context.scene.input_tool.animation_enum, - enableModelsBlender=bpy.context.scene.input_tool.modelBool, - modelFileFormat=bpy.context.scene.input_tool.modelEnum, + enable_models=bpy.context.scene.input_tool.model_bool, + model_file_format=bpy.context.scene.input_tool.model_enum, - enableCustomFields=bpy.context.scene.input_tool.enableCustomFields, + enable_custom_fields=bpy.context.scene.input_tool.enable_custom_fields, - cardanoMetaDataBool=bpy.context.scene.input_tool.cardanoMetaDataBool, - solanaMetaDataBool=bpy.context.scene.input_tool.solanaMetaDataBool, - erc721MetaData=bpy.context.scene.input_tool.erc721MetaData, + cardano_metadata_bool=bpy.context.scene.input_tool.cardano_metadata_bool, + solana_metadata_bool=bpy.context.scene.input_tool.solana_metadata_bool, + 
erc721_metadata=bpy.context.scene.input_tool.erc721_metadata, cardano_description=bpy.context.scene.input_tool.cardano_description, solana_description=bpy.context.scene.input_tool.solana_description, erc721_description=bpy.context.scene.input_tool.erc721_description, - enableMaterials=bpy.context.scene.input_tool.enableMaterials, - materialsFile=bpy.path.abspath(bpy.context.scene.input_tool.materialsFile), + enable_materials=bpy.context.scene.input_tool.enable_materials, + materials_file=bpy.path.abspath(bpy.context.scene.input_tool.materials_file), - enableAutoShutdown=bpy.context.scene.input_tool.enableAutoShutdown, + enable_auto_shutdown=bpy.context.scene.input_tool.enable_auto_shutdown, - specify_timeBool=bpy.context.scene.input_tool.specify_timeBool, + specify_time_bool=bpy.context.scene.input_tool.specify_time_bool, hours=bpy.context.scene.input_tool.hours, minutes=bpy.context.scene.input_tool.minutes, - emailNotificationBool=bpy.context.scene.input_tool.emailNotificationBool, + email_notification_bool=bpy.context.scene.input_tool.email_notification_bool, sender_from=bpy.context.scene.input_tool.sender_from, email_password=bpy.context.scene.input_tool.email_password, receiver_to=bpy.context.scene.input_tool.receiver_to, @@ -263,35 +264,35 @@ def runAsHeadless(): print('Using {} devices for rendering!'.format(cprefs.get_num_gpu_devices())) - def dumpSettings(settings): - output = ( - f"nftName={settings.nftName}\n" - f"collectionSize={str(settings.collectionSize)}\n" - f"nftsPerBatch={str(settings.nftsPerBatch)}\n" - f"save_path={settings.save_path}\n" - f"enableRarity={(settings.enableRarity)}\n" - f"enableLogic={str(settings.enableLogic)}\n" - f"imageBool={str(settings.imageBool)}\n" - f"imageEnum={settings.imageEnum}\n" - f"animationBool={str(settings.animationBool)}\n" - f"animationEnum={settings.animationEnum}\n" - f"modelBool={str(settings.modelBool)}\n" - f"modelEnum={settings.modelEnum}\n" - f"batch_to_generate={str(settings.batchToGenerate)}\n" - 
f"cardanoMetaDataBool={str(settings.cardanoMetaDataBool)}\n" - f"cardano_description={settings.cardano_description}\n" - f"erc721MetaData={str(settings.erc721MetaData)}\n" - f"erc721_description={settings.erc721_description}\n" - f"solanaMetaDataBool={str(settings.solanaMetaDataBool)}\n" - f"solana_description={settings.solana_description}\n" - f"enableCustomFields={str(settings.enableCustomFields)}\n" - f"customfieldsFile={settings.customfieldsFile}\n" - f"enableMaterials={str(settings.customfieldsFile)}\n" - f"materialsFile={settings.materialsFile}\n" - ) - print(output) + # def dumpSettings(settings): + # output = ( + # f"nft_name={settings.nft_name}\n" + # f"collection_size={str(settings.collection_size)}\n" + # f"nfts_per_batch={str(settings.nfts_per_batch)}\n" + # f"save_path={settings.save_path}\n" + # f"enable_rarity={(settings.enable_rarity)}\n" + # f"enable_logic={str(settings.enable_logic)}\n" + # f"image_bool={str(settings.image_bool)}\n" + # f"image_enum={settings.image_enum}\n" + # f"animation_bool={str(settings.animation_bool)}\n" + # f"animation_enum={settings.animation_enum}\n" + # f"model_bool={str(settings.model_bool)}\n" + # f"model_enum={settings.model_enum}\n" + # f"batch_to_generate={str(settings.batch_to_generate)}\n" + # f"cardano_metadata_bool={str(settings.cardano_metadata_bool)}\n" + # f"cardano_description={settings.cardano_description}\n" + # f"erc721_metadata={str(settings.erc721_metadata)}\n" + # f"erc721_description={settings.erc721_description}\n" + # f"solana_metadata_bool={str(settings.solana_metadata_bool)}\n" + # f"solana_description={settings.solana_description}\n" + # f"enable_custom_fields={str(settings.enable_custom_fields)}\n" + # f"custom_fields_file={settings.custom_fields_file}\n" + # f"enable_materials={str(settings.custom_fields_file)}\n" + # f"materials_file={settings.materials_file}\n" + # ) + # print(output) - args, parser = headless_util.getPythonArgs() + args, parser = headless_util.get_python_args() settings = 
bpy.context.scene.input_tool @@ -304,37 +305,37 @@ def runAsHeadless(): # print(pairs) - settings.nftName = pairs[0][1] - settings.collectionSize = int(pairs[1][1]) - settings.nftsPerBatch = int(pairs[2][1]) + settings.nft_name = pairs[0][1] + settings.collection_size = int(pairs[1][1]) + settings.nfts_per_batch = int(pairs[2][1]) settings.save_path = pairs[3][1] - settings.enableRarity = pairs[4][1] == 'True' - settings.enableLogic = pairs[5][1] == 'True' + settings.enable_rarity = pairs[4][1]=='True' + settings.enable_logic = pairs[5][1]=='True' settings.enableLogicJson = pairs[6][1] == 'True' - settings.logicFile = pairs[7][1] - settings.imageBool = pairs[8][1] == 'True' - settings.imageEnum = pairs[9][1] - settings.animationBool = pairs[10][1] == 'True' - settings.animationEnum = pairs[11][1] - settings.modelBool = pairs[12][1] == 'True' - settings.modelEnum = pairs[13][1] - settings.batchToGenerate = int(pairs[14][1]) - settings.cardanoMetaDataBool = pairs[15][1] == 'True' + settings.logic_file = pairs[7][1] + settings.image_bool = pairs[8][1]=='True' + settings.image_enum = pairs[9][1] + settings.animation_bool = pairs[10][1]=='True' + settings.animation_enum = pairs[11][1] + settings.model_bool = pairs[12][1]=='True' + settings.model_enum = pairs[13][1] + settings.batch_to_generate = int(pairs[14][1]) + settings.cardano_metadata_bool = pairs[15][1]=='True' settings.cardano_description = pairs[16][1] - settings.erc721MetaData = pairs[17][1] == 'True' + settings.erc721_metadata = pairs[17][1]=='True' settings.erc721_description = pairs[18][1] - settings.solanaMetaDataBool = pairs[19][1] == 'True' + settings.solana_metadata_bool = pairs[19][1]=='True' settings.solanaDescription = pairs[20][1] - settings.enableCustomFields = pairs[21][1] == 'True' - settings.customfieldsFile = pairs[22][1] - settings.enableMaterials = pairs[23][1] == 'True' - settings.materialsFile = pairs[24][1] + settings.enable_custom_fields = pairs[21][1]=='True' + 
settings.custom_fields_file = pairs[22][1] + settings.enable_materials = pairs[23][1]=='True' + settings.materials_file = pairs[24][1] if args.save_path: settings.save_path = args.save_path if args.batch_number: - settings.batchToGenerate = args.batch_number + settings.batch_to_generate = args.batch_number input = getBMNFTData() @@ -342,27 +343,27 @@ def runAsHeadless(): input.batch_json_save_path = args.batch_data_path if args.operation == 'create-dna': - intermediate.send_To_Record_JSON(input) + intermediate.send_to_record(input) elif args.operation == 'generate-nfts': - intermediate.render_and_save_NFTs(input) + intermediate.render_and_save_nfts(input) elif args.operation == 'refactor-batches': - refactorer.reformatNFTCollection(input) + refactorer.reformat_nft_collection(input) # ======== User input Property Group ======== # class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): # Create NFT Data Panel: - nftName: bpy.props.StringProperty(name="NFT Name") + nft_name: bpy.props.StringProperty(name="NFT Name") - collectionSize: bpy.props.IntProperty( + collection_size: bpy.props.IntProperty( name="NFT Collection Size", default=1, min=1 ) # max=(combinations - offset) - nftsPerBatch: bpy.props.IntProperty( + nfts_per_batch: bpy.props.IntProperty( name="NFTs Per Batch", default=1, min=1 @@ -376,17 +377,17 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): subtype="DIR_PATH" ) - enableRarity: bpy.props.BoolProperty( + enable_rarity: bpy.props.BoolProperty( name="Enable Rarity" ) - enableLogic: bpy.props.BoolProperty( + enable_logic: bpy.props.BoolProperty( name="Enable Logic" ) - enable_Logic_Json: bpy.props.BoolProperty( + enable_logic_json: bpy.props.BoolProperty( name="Use Logic.json instead" ) - logicFile: bpy.props.StringProperty( + logic_file: bpy.props.StringProperty( name="Logic File Path", description="Path where Logic.json is located.", default="", @@ -394,10 +395,10 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): 
subtype="FILE_PATH" ) - enableMaterials: bpy.props.BoolProperty( + enable_materials: bpy.props.BoolProperty( name="Enable Materials" ) - materialsFile: bpy.props.StringProperty( + materials_file: bpy.props.StringProperty( name="Materials File", description="Path where Materials.json is located.", default="", @@ -406,10 +407,10 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): ) # Generate NFTs Panel: - imageBool: bpy.props.BoolProperty( + image_bool: bpy.props.BoolProperty( name="Image" ) - imageEnum: bpy.props.EnumProperty( + image_enum: bpy.props.EnumProperty( name="Image File Format", description="Select Image file format", items=[ @@ -418,10 +419,10 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): ] ) - animationBool: bpy.props.BoolProperty( + animation_bool: bpy.props.BoolProperty( name="Animation" ) - animationEnum: bpy.props.EnumProperty( + animation_enum: bpy.props.EnumProperty( name="Animation File Format", description="Select Animation file format", items=[ @@ -434,10 +435,10 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): ] ) - modelBool: bpy.props.BoolProperty( + model_bool: bpy.props.BoolProperty( name="3D Model" ) - modelEnum: bpy.props.EnumProperty( + model_enum: bpy.props.EnumProperty( name="3D Model File Format", description="Select 3D Model file format", items=[ @@ -454,38 +455,38 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): ] ) - batchToGenerate: bpy.props.IntProperty( + batch_to_generate: bpy.props.IntProperty( name="Batch To Generate", default=1, min=1 ) # Refactor Batches & Create Metadata Panel: - cardanoMetaDataBool: bpy.props.BoolProperty( + cardano_metadata_bool: bpy.props.BoolProperty( name="Cardano Cip" ) cardano_description: bpy.props.StringProperty( name="Cardano description" ) - solanaMetaDataBool: bpy.props.BoolProperty( + solana_metadata_bool: bpy.props.BoolProperty( name="Solana Metaplex" ) solana_description: bpy.props.StringProperty( name="Solana description" ) - 
erc721MetaData: bpy.props.BoolProperty( + erc721_metadata: bpy.props.BoolProperty( name="ERC721" ) erc721_description: bpy.props.StringProperty( name="ERC721 description" ) - enableCustomFields: bpy.props.BoolProperty( + enable_custom_fields: bpy.props.BoolProperty( name="Enable Custom Metadata Fields" ) - customfieldsFile: bpy.props.StringProperty( + custom_fields_file: bpy.props.StringProperty( name="Custom Fields File", description="Path where Custom_Fields.json is located.", default="", @@ -496,17 +497,17 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): # TODO: Add 'Other' panel inputs to Headless functionality. # Other Panel: - enableAutoSave: bpy.props.BoolProperty( + enable_auto_save: bpy.props.BoolProperty( name="Auto Save Before Generation", description="Automatically saves your Blender file when 'Generate NFTs & Create Metadata' button is clicked" ) - enableAutoShutdown: bpy.props.BoolProperty( + enable_auto_shutdown: bpy.props.BoolProperty( name="Auto Shutdown", description="Automatically shuts down your computer after a Batch is finished Generating" ) - specify_timeBool: bpy.props.BoolProperty( + specify_time_bool: bpy.props.BoolProperty( name="Shutdown in a Given Amount of Time", description="Wait a given amount of time after a Batch is generated before Automatic Shutdown" ) @@ -517,7 +518,7 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): default=0, min=0 ) - emailNotificationBool: bpy.props.BoolProperty( + email_notification_bool: bpy.props.BoolProperty( name="Email Notifications", description="Receive Email Notifications from Blender once a batch is finished generating" ) @@ -541,17 +542,18 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): ) # API Panel properties: - apiKey: bpy.props.StringProperty( + api_key: bpy.props.StringProperty( name="API Key", subtype='PASSWORD' ) # Test code for future features # ======== Main Operators ======== # -class createData(bpy.types.Operator): +class 
Createdata(bpy.types.Operator): bl_idname = 'create.data' bl_label = 'Create Data' - bl_description = 'Creates NFT Data. Run after any changes were made to scene. All previous data will be overwritten and cannot be recovered.' + bl_description = 'Creates NFT Data. Run after any changes were made to scene. All previous data will be ' \ + 'overwritten and cannot be recovered.' bl_options = {"REGISTER", "UNDO"} reverse_order: BoolProperty( @@ -562,12 +564,12 @@ class createData(bpy.types.Operator): # Handling Custom Fields UIList input: input = getBMNFTData() - if input.enableLogic: - if input.enable_Logic_Json and not input.logicFile: + if input.enable_logic: + if input.enable_logic_json and not input.logic_file: self.report({'ERROR'}, f"No Logic.json file path set. Please set the file path to your Logic.json file.") - intermediate.send_To_Record_JSON(input) + intermediate.send_to_record(input) self.report({'INFO'}, f"NFT Data created!") return {"FINISHED"} @@ -576,7 +578,7 @@ class createData(bpy.types.Operator): return context.window_manager.invoke_confirm(self, event) -class exportNFTs(bpy.types.Operator): +class ExportNFTs(bpy.types.Operator): bl_idname = 'exporter.nfts' bl_label = 'Export NFTs' bl_description = 'Generate and export a given batch of NFTs.' @@ -590,14 +592,14 @@ class exportNFTs(bpy.types.Operator): input = getBMNFTData() # Handling Custom Fields UIList input: - intermediate.render_and_save_NFTs(input) + intermediate.render_and_save_nfts(input) - self.report({'INFO'}, f"All NFTs generated for batch {input.batchToGenerate}!") + self.report({'INFO'}, f"All NFTs generated for batch {input.batch_to_generate}!") return {"FINISHED"} -class resume_failed_batch(bpy.types.Operator): +class ResumeFailedBatch(bpy.types.Operator): bl_idname = 'exporter.resume_nfts' bl_label = 'Resume Failed Batch' bl_description = 'Failed Batch detected. Generate NFTs where the previous batch failed?' 
@@ -607,7 +609,7 @@ class resume_failed_batch(bpy.types.Operator): _save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path) _Blend_My_NFTs_Output, _batch_json_save_path, _nftBatch_save_path = make_directories(_save_path) - _batchToGenerate = bpy.context.scene.input_tool.batchToGenerate + _batchToGenerate = bpy.context.scene.input_tool.batch_to_generate file_name = os.path.join(_batch_json_save_path, "Batch{}.json".format(_batchToGenerate)) batchData = json.load(open(file_name)) @@ -617,51 +619,51 @@ class resume_failed_batch(bpy.types.Operator): render_settings = batchData["Generation Save"][-1]["Render_Settings"] input = BMNFTData( - nftName=render_settings["nftName"], + nft_name=render_settings["nft_name"], save_path=_save_path, - nftsPerBatch=render_settings["nftsPerBatch"], - batchToGenerate=render_settings["batch_to_generate"], - collectionSize=render_settings["collectionSize"], + nfts_per_batch=render_settings["nfts_per_batch"], + batch_to_generate=render_settings["batch_to_generate"], + collection_size=render_settings["collection_size"], - Blend_My_NFTs_Output=_Blend_My_NFTs_Output, + blend_my_nfts_output=_Blend_My_NFTs_Output, batch_json_save_path=_batch_json_save_path, - nftBatch_save_path=render_settings["nftBatch_save_path"], + nft_batch_save_path=render_settings["nft_batch_save_path"], - enableImages=render_settings["enableImages"], - imageFileFormat=render_settings["imageFileFormat"], + enable_images=render_settings["enable_images"], + image_file_format=render_settings["image_file_format"], - enableAnimations=render_settings["enableAnimations"], - animationFileFormat=render_settings["animationFileFormat"], + enable_animations=render_settings["enable_animations"], + animation_file_format=render_settings["animation_file_format"], - enableModelsBlender=render_settings["enableModelsBlender"], - modelFileFormat=render_settings["modelFileFormat"], + enable_models=render_settings["enable_models"], + 
model_file_format=render_settings["model_file_format"], - enableCustomFields=render_settings["enableCustomFields"], + enable_custom_fields=render_settings["enable_custom_fields"], - cardanoMetaDataBool=render_settings["cardanoMetaDataBool"], - solanaMetaDataBool=render_settings["solanaMetaDataBool"], - erc721MetaData=render_settings["erc721MetaData"], + cardano_metadata_bool=render_settings["cardano_metadata_bool"], + solana_metadata_bool=render_settings["solana_metadata_bool"], + erc721_metadata=render_settings["erc721_metadata"], cardano_description=render_settings["cardano_description"], solana_description=render_settings["solana_description"], erc721_description=render_settings["erc721_description"], - enableMaterials=render_settings["enableMaterials"], - materialsFile=render_settings["materialsFile"], + enable_materials=render_settings["enable_materials"], + materials_file=render_settings["materials_file"], - enableLogic=render_settings["enableLogic"], - enable_Logic_Json=render_settings["enable_Logic_Json"], - logicFile=render_settings["logicFile"], + enable_logic=render_settings["enable_logic"], + enable_logic_json=render_settings["enable_logic_json"], + logic_file=render_settings["logic_file"], - enableRarity=render_settings["enableRarity"], + enable_rarity=render_settings["enable_rarity"], - enableAutoShutdown=render_settings["enableAutoShutdown"], + enable_auto_shutdown=render_settings["enable_auto_shutdown"], - specify_timeBool=render_settings["specify_timeBool"], + specify_time_bool=render_settings["specify_time_bool"], hours=render_settings["hours"], minutes=render_settings["minutes"], - emailNotificationBool=render_settings["emailNotificationBool"], + email_notification_bool=render_settings["email_notification_bool"], sender_from=render_settings["sender_from"], email_password=render_settings["email_password"], receiver_to=render_settings["receiver_to"], @@ -672,7 +674,7 @@ class resume_failed_batch(bpy.types.Operator): failed_dna=_failed_dna, 
failed_dna_index=_failed_dna_index, - custom_Fields=render_settings["custom_Fields"], + custom_fields=render_settings["custom_fields"], ) exporter.render_and_save_nfts(input) @@ -682,7 +684,7 @@ class resume_failed_batch(bpy.types.Operator): return {"FINISHED"} -class refactor_Batches(bpy.types.Operator): +class RefactorBatches(bpy.types.Operator): """Refactor your collection? This action cannot be undone.""" bl_idname = 'refactor.batches' bl_label = 'Refactor your Batches?' @@ -695,14 +697,14 @@ class refactor_Batches(bpy.types.Operator): def execute(self, context): # Passing info to main functions for refactoring: - refactorer.reformatNFTCollection(getBMNFTData()) + refactorer.reformat_nft_collection(getBMNFTData()) return {"FINISHED"} def invoke(self, context, event): return context.window_manager.invoke_confirm(self, event) -class export_settings(bpy.types.Operator): +class ExportSettings(bpy.types.Operator): """Export your settings into a configuration file.""" bl_idname = 'export.settings' bl_label = 'Export Settings' @@ -721,51 +723,51 @@ class export_settings(bpy.types.Operator): "#when running Blend_My_NFTs in a headless environment.\n" "\n" "#The name of your nft project\n" - f"nftName={settings.nftName}\n" + f"nft_name={settings.nft_name}\n" "\n" "#NFT Collection Size\n" - f"collectionSize={settings.collectionSize}\n" + f"collection_size={settings.collection_size}\n" "\n" "#The number of NFTs to generate per batch\n" - f"nftsPerBatch={str(settings.nftsPerBatch)}\n" + f"nfts_per_batch={str(settings.nfts_per_batch)}\n" "\n" "#Save path for your NFT files\n" f"save_path={settings.save_path}\n" "\n" "#Enable Rarity\n" - f"enableRarity={(settings.enableRarity)}\n" + f"enable_rarity={settings.enable_rarity}\n" "\n" "#Enable Logic\n" - f"enableLogic={str(settings.enableLogic)}\n" - f"enableLogicJson={str(settings.enable_Logic_Json)}\n" - f"logicFilePath={settings.logicFile}\n" + f"enable_logic={str(settings.enable_logic)}\n" + 
f"enableLogicJson={str(settings.enable_logic_json)}\n" + f"logicFilePath={settings.logic_file}\n" "\n" "#NFT Media output type(s):\n" - f"imageBool={str(settings.imageBool)}\n" - f"imageEnum={settings.imageEnum}\n" - f"animationBool={str(settings.animationBool)}\n" - f"animationEnum={settings.animationEnum}\n" - f"modelBool={str(settings.modelBool)}\n" - f"modelEnum={settings.modelEnum}\n" + f"image_bool={str(settings.image_bool)}\n" + f"image_enum={settings.image_enum}\n" + f"animation_bool={str(settings.animation_bool)}\n" + f"animation_enum={settings.animation_enum}\n" + f"model_bool={str(settings.model_bool)}\n" + f"model_enum={settings.model_enum}\n" "\n" "#Batch to generate\n" - f"batch_to_generate={str(settings.batchToGenerate)}\n" + f"batch_to_generate={str(settings.batch_to_generate)}\n" "\n" "#Metadata Format\n" - f"cardanoMetaDataBool={str(settings.cardanoMetaDataBool)}\n" + f"cardano_metadata_bool={str(settings.cardano_metadata_bool)}\n" f"cardano_description={settings.cardano_description}\n" - f"erc721MetaData={str(settings.erc721MetaData)}\n" + f"erc721_metadata={str(settings.erc721_metadata)}\n" f"erc721_description={settings.erc721_description}\n" - f"solanaMetaDataBool={str(settings.solanaMetaDataBool)}\n" + f"solana_metadata_bool={str(settings.solana_metadata_bool)}\n" f"solana_description={settings.solana_description}\n" "\n" "#Enable Custom Fields\n" - f"enableCustomFields={str(settings.enableCustomFields)}\n" - f"customfieldsFile={settings.customfieldsFile}\n" + f"enable_custom_fields={str(settings.enable_custom_fields)}\n" + f"custom_fields_file={settings.custom_fields_file}\n" "\n" "#Enable Materials\n" - f"enableMaterials={str(settings.enableMaterials)}\n" - f"materialsFile={settings.materialsFile}\n" + f"enable_materials={str(settings.enable_materials)}\n" + f"materials_file={settings.materials_file}\n" ) print(output, file=config) @@ -789,29 +791,29 @@ class BMNFTS_PT_CreateData(bpy.types.Panel): input_tool_scene = scene.input_tool row = 
layout.row() - row.prop(input_tool_scene, "nftName") + row.prop(input_tool_scene, "nft_name") row = layout.row() layout.label(text=f"Maximum Number Of NFTs: {combinations}") layout.label(text=f"Recommended limit: {recommended_limit}") row = layout.row() - row.prop(input_tool_scene, "collectionSize") + row.prop(input_tool_scene, "collection_size") row = layout.row() - row.prop(input_tool_scene, "nftsPerBatch") + row.prop(input_tool_scene, "nfts_per_batch") row = layout.row() row.prop(input_tool_scene, "save_path") row = layout.row() - row.prop(input_tool_scene, "enableRarity") + row.prop(input_tool_scene, "enable_rarity") row = layout.row() - row.prop(input_tool_scene, "enableLogic") + row.prop(input_tool_scene, "enable_logic") # Logic_UIList implementation: - if bpy.context.scene.input_tool.enableLogic: + if bpy.context.scene.input_tool.enable_logic: layout = self.layout scn = bpy.context.scene @@ -835,18 +837,18 @@ class BMNFTS_PT_CreateData(bpy.types.Panel): row.label(text=f"*Field Names must be unique.") row = layout.row() - row.prop(input_tool_scene, "enable_Logic_Json") + row.prop(input_tool_scene, "enable_logic_json") - if bpy.context.scene.input_tool.enable_Logic_Json: + if bpy.context.scene.input_tool.enable_logic_json: row = layout.row() - row.prop(input_tool_scene, "logicFile") + row.prop(input_tool_scene, "logic_file") row = layout.row() - row.prop(input_tool_scene, "enableMaterials") + row.prop(input_tool_scene, "enable_materials") - if bpy.context.scene.input_tool.enableMaterials: + if bpy.context.scene.input_tool.enable_materials: row = layout.row() - row.prop(input_tool_scene, "materialsFile") + row.prop(input_tool_scene, "materials_file") row = layout.row() self.layout.operator("create.data", icon='DISCLOSURE_TRI_RIGHT', text="Create Data") @@ -870,26 +872,26 @@ class BMNFTS_PT_GenerateNFTs(bpy.types.Panel): layout.label(text="NFT Media files:") row = layout.row() - row.prop(input_tool_scene, "imageBool") - if bpy.context.scene.input_tool.imageBool: 
- row.prop(input_tool_scene, "imageEnum") + row.prop(input_tool_scene, "image_bool") + if bpy.context.scene.input_tool.image_bool: + row.prop(input_tool_scene, "image_enum") row = layout.row() - row.prop(input_tool_scene, "animationBool") - if bpy.context.scene.input_tool.animationBool: - row.prop(input_tool_scene, "animationEnum") + row.prop(input_tool_scene, "animation_bool") + if bpy.context.scene.input_tool.animation_bool: + row.prop(input_tool_scene, "animation_enum") row = layout.row() - row.prop(input_tool_scene, "modelBool") - if bpy.context.scene.input_tool.modelBool: - row.prop(input_tool_scene, "modelEnum") + row.prop(input_tool_scene, "model_bool") + if bpy.context.scene.input_tool.model_bool: + row.prop(input_tool_scene, "model_enum") row = layout.row() layout.label(text="Meta Data format:") row = layout.row() - row.prop(input_tool_scene, "cardanoMetaDataBool") - if bpy.context.scene.input_tool.cardanoMetaDataBool: + row.prop(input_tool_scene, "cardano_metadata_bool") + if bpy.context.scene.input_tool.cardano_metadata_bool: row = layout.row() row.prop(input_tool_scene, "cardano_description") @@ -898,8 +900,8 @@ class BMNFTS_PT_GenerateNFTs(bpy.types.Panel): icon='URL').url = "https://cips.cardano.org/cips/cip25/" row = layout.row() - row.prop(input_tool_scene, "solanaMetaDataBool") - if bpy.context.scene.input_tool.solanaMetaDataBool: + row.prop(input_tool_scene, "solana_metadata_bool") + if bpy.context.scene.input_tool.solana_metadata_bool: row = layout.row() row.prop(input_tool_scene, "solana_description") @@ -908,8 +910,8 @@ class BMNFTS_PT_GenerateNFTs(bpy.types.Panel): icon='URL').url = "https://docs.metaplex.com/token-metadata/specification" row = layout.row() - row.prop(input_tool_scene, "erc721MetaData") - if bpy.context.scene.input_tool.erc721MetaData: + row.prop(input_tool_scene, "erc721_metadata") + if bpy.context.scene.input_tool.erc721_metadata: row = layout.row() row.prop(input_tool_scene, "erc721_description") @@ -918,10 +920,10 @@ class 
BMNFTS_PT_GenerateNFTs(bpy.types.Panel): icon='URL').url = "https://docs.opensea.io/docs/metadata-standards" row = layout.row() - row.prop(input_tool_scene, "enableCustomFields") + row.prop(input_tool_scene, "enable_custom_fields") # Custom Metadata Fields UIList: - if bpy.context.scene.input_tool.enableCustomFields: + if bpy.context.scene.input_tool.enable_custom_fields: layout = self.layout scn = bpy.context.scene @@ -1010,16 +1012,16 @@ class BMNFTS_PT_Other(bpy.types.Panel): """ row = layout.row() - row.prop(input_tool_scene, "enableAutoSave") + row.prop(input_tool_scene, "enable_auto_save") # Auto Shutdown: row = layout.row() - row.prop(input_tool_scene, "enableAutoShutdown") + row.prop(input_tool_scene, "enable_auto_shutdown") row.label(text="*Must Run Blender as Admin") - if bpy.context.scene.input_tool.enableAutoShutdown: + if bpy.context.scene.input_tool.enable_auto_shutdown: row = layout.row() - row.prop(input_tool_scene, "specify_timeBool") + row.prop(input_tool_scene, "specify_time_bool") time_row1 = layout.row() time_row1.label(text=f"Hours") @@ -1029,7 +1031,7 @@ class BMNFTS_PT_Other(bpy.types.Panel): time_row2.label(text=f"Minutes") time_row2.prop(input_tool_scene, "minutes", text="") - if not bpy.context.scene.input_tool.specify_timeBool: + if not bpy.context.scene.input_tool.specify_time_bool: time_row1.enabled = False time_row2.enabled = False else: @@ -1038,10 +1040,10 @@ class BMNFTS_PT_Other(bpy.types.Panel): layout.separator() row = layout.row() - row.prop(input_tool_scene, "emailNotificationBool") + row.prop(input_tool_scene, "email_notification_bool") row.label(text="*Windows 10+ only") - if bpy.context.scene.input_tool.emailNotificationBool: + if bpy.context.scene.input_tool.email_notification_bool: row = layout.row() row.prop(input_tool_scene, "sender_from") row = layout.row() @@ -1089,22 +1091,22 @@ class BMNFTS_PT_Other(bpy.types.Panel): # ======== Blender add-on register/unregister handling ======== # classes = ( - # Property Group 
Classes: - BMNFTS_PGT_Input_Properties, + # Property Group Classes: + BMNFTS_PGT_Input_Properties, - # Operator Classes: - createData, - exportNFTs, - resume_failed_batch, - refactor_Batches, - export_settings, + # Operator Classes: + Createdata, + ExportNFTs, + ResumeFailedBatch, + RefactorBatches, + ExportSettings, - # Panel Classes: - BMNFTS_PT_CreateData, - BMNFTS_PT_GenerateNFTs, - BMNFTS_PT_Refactor, - BMNFTS_PT_Other, - ) + custom_metadata_ui_list.classes_Custom_Metadata_UIList + logic_ui_list.classes_Logic_UIList + # Panel Classes: + BMNFTS_PT_CreateData, + BMNFTS_PT_GenerateNFTs, + BMNFTS_PT_Refactor, + BMNFTS_PT_Other, +) + custom_metadata_ui_list.classes_Custom_Metadata_UIList + logic_ui_list.classes_Logic_UIList def register(): diff --git a/main/dna_generator.py b/main/dna_generator.py index 289d56b..62a30c1 100644 --- a/main/dna_generator.py +++ b/main/dna_generator.py @@ -110,7 +110,7 @@ def generate_nft_dna( # print(f"Rarity DNA: {single_dna}") if enable_logic: - single_dna = logic.logicafyDNAsingle(hierarchy, single_dna, logic_file, enable_rarity, enable_materials) + single_dna = logic.logicafy_dna_single(hierarchy, single_dna, logic_file, enable_rarity, enable_materials) # print(f"Logic DNA: {single_dna}") if enable_materials: @@ -170,7 +170,7 @@ def make_batches( batch_json_save_path ): """ - Sorts through all the batches and outputs a given number of batches depending on collectionSize and nftsPerBatch. + Sorts through all the batches and outputs a given number of batches depending on collection_size and nfts_per_batch. 
These files are then saved as Batch#.json files to batch_json_save_path """ diff --git a/main/exporter.py b/main/exporter.py index 0c640e0..2a70c35 100644 --- a/main/exporter.py +++ b/main/exporter.py @@ -12,7 +12,7 @@ import datetime import platform from .helpers import TextColors, Loader -from .metadata_templates import createCardanoMetadata, createSolanaMetaData, createErc721MetaData +from .metadata_templates import create_cardano_metadata, createSolanaMetaData, create_erc721_meta_data # Save info @@ -28,7 +28,7 @@ def save_generation_state(input): Saves date and time of generation start, and generation types; Images, Animations, 3D Models, and the file types for each. """ - file_name = os.path.join(input.batch_json_save_path, "Batch{}.json".format(input.batchToGenerate)) + file_name = os.path.join(input.batch_json_save_path, "Batch{}.json".format(input.batch_to_generate)) batch = json.load(open(file_name)) current_time = datetime.datetime.now().strftime("%H:%M:%S") @@ -46,56 +46,56 @@ def save_generation_state(input): "DNA Generated": None, "Generation Start Date and Time": [current_time, current_date, local_timezone], "Render_Settings": { - "nftName": input.nftName, + "nft_name": input.nft_name, "save_path": input.save_path, - "nftsPerBatch": input.nftsPerBatch, - "batch_to_generate": input.batchToGenerate, - "collectionSize": input.collectionSize, + "nfts_per_batch": input.nfts_per_batch, + "batch_to_generate": input.batch_to_generate, + "collection_size": input.collection_size, - "Blend_My_NFTs_Output": input.Blend_My_NFTs_Output, + "blend_my_nfts_output": input.blend_my_nfts_output, "batch_json_save_path": input.batch_json_save_path, - "nftBatch_save_path": input.nftBatch_save_path, + "nft_batch_save_path": input.nft_batch_save_path, - "enableImages": input.enableImages, - "imageFileFormat": input.imageFileFormat, + "enable_images": input.enable_images, + "image_file_format": input.image_file_format, - "enableAnimations": input.enableAnimations, - 
"animationFileFormat": input.animationFileFormat, + "enable_animations": input.enable_animations, + "animation_file_format": input.animation_file_format, - "enableModelsBlender": input.enableModelsBlender, - "modelFileFormat": input.modelFileFormat, + "enable_models": input.enable_models, + "model_file_format": input.model_file_format, - "enableCustomFields": input.enableCustomFields, + "enable_custom_fields": input.enable_custom_fields, - "cardanoMetaDataBool": input.cardanoMetaDataBool, - "solanaMetaDataBool": input.solanaMetaDataBool, - "erc721MetaData": input.erc721MetaData, + "cardano_metadata_bool": input.cardano_metadata_bool, + "solana_metadata_bool": input.solana_metadata_bool, + "erc721_metadata": input.erc721_metadata, "cardano_description": input.cardano_description, "solana_description": input.solana_description, "erc721_description": input.erc721_description, - "enableMaterials": input.enableMaterials, - "materialsFile": input.materialsFile, + "enable_materials": input.enable_materials, + "materials_file": input.materials_file, - "enableLogic": input.enableLogic, - "enable_Logic_Json": input.enable_Logic_Json, - "logicFile": input.logicFile, + "enable_logic": input.enable_logic, + "enable_logic_json": input.enable_logic_json, + "logic_file": input.logic_file, - "enableRarity": input.enableRarity, + "enable_rarity": input.enable_rarity, - "enableAutoShutdown": input.enableAutoShutdown, + "enable_auto_shutdown": input.enable_auto_shutdown, - "specify_timeBool": input.specify_timeBool, + "specify_time_bool": input.specify_time_bool, "hours": input.hours, "minutes": input.minutes, - "emailNotificationBool": input.emailNotificationBool, + "email_notification_bool": input.email_notification_bool, "sender_from": input.sender_from, "email_password": input.email_password, "receiver_to": input.receiver_to, - "custom_Fields": input.custom_Fields, + "custom_fields": input.custom_fields, }, }) @@ -148,23 +148,23 @@ def render_and_save_nfts(input): # If user is 
generating the normal way: else: - print(f"\nGenerating Batch #{input.batchToGenerate}\n") - nfts_in_batch, hierarchy, batch_dna_list = get_batch_data(input.batchToGenerate, input.batch_json_save_path) + print(f"\nGenerating Batch #{input.batch_to_generate}\n") + nfts_in_batch, hierarchy, batch_dna_list = get_batch_data(input.batch_to_generate, input.batch_json_save_path) save_generation_state(input) x = 1 - if input.enableMaterials: - materials_file = json.load(open(input.materialsFile)) + if input.enable_materials: + materials_file = json.load(open(input.materials_file)) for a in batch_dna_list: full_single_dna = list(a.keys())[0] order_num = a[full_single_dna]['order_num'] # Material handling: - if input.enableMaterials: + if input.enable_materials: single_dna, material_dna = full_single_dna.split(':') - if not input.enableMaterials: + if not input.enable_materials: single_dna = full_single_dna def match_dna_to_variant(single_dna): @@ -221,7 +221,7 @@ def render_and_save_nfts(input): metadata_material_dict = {} - if input.enableMaterials: + if input.enable_materials: material_dna_dictionary = match_material_dna_to_material(single_dna, material_dna, materials_file) for var_mat in list(material_dna_dictionary.keys()): @@ -263,7 +263,7 @@ def render_and_save_nfts(input): ) dna_dictionary = match_dna_to_variant(single_dna) - name = input.nftName + "_" + str(order_num) + name = input.nft_name + "_" + str(order_num) # Change Text Object in Scene to match DNA string: # Variables that can be used: full_single_dna, name, order_num @@ -273,7 +273,7 @@ def render_and_save_nfts(input): print(f"\n{TextColors.OK}======== Generating NFT {x}/{nfts_in_batch}: {name} ========{TextColors.RESET}") print(f"\nVariants selected:") print(f"{dna_dictionary}") - if input.enableMaterials: + if input.enable_materials: print(f"\nMaterials selected:") print(f"{material_dna_dictionary}") @@ -288,7 +288,7 @@ def render_and_save_nfts(input): time_start_2 = time.time() # Main paths for batch 
sub-folders: - batch_folder = os.path.join(input.nftBatch_save_path, "Batch" + str(input.batchToGenerate)) + batch_folder = os.path.join(input.nft_batch_save_path, "Batch" + str(input.batch_to_generate)) image_folder = os.path.join(batch_folder, "Images") animation_folder = os.path.join(batch_folder, "Animations") @@ -313,7 +313,7 @@ def render_and_save_nfts(input): os.remove(file_path) # Generation/Rendering: - if input.enableImages: + if input.enable_images: print(f"{TextColors.OK}-------- Image --------{TextColors.RESET}") @@ -326,7 +326,7 @@ def render_and_save_nfts(input): os.makedirs(image_folder) bpy.context.scene.render.filepath = image_path - bpy.context.scene.render.image_settings.file_format = input.imageFileFormat + bpy.context.scene.render.image_settings.file_format = input.image_file_format bpy.ops.render.render(write_still=True) # Loading Animation: @@ -341,7 +341,7 @@ def render_and_save_nfts(input): f"\n{TextColors.RESET}" ) - if input.enableAnimations: + if input.enable_animations: print(f"{TextColors.OK}-------- Animation --------{TextColors.RESET}") animation_render_time_start = time.time() @@ -352,7 +352,7 @@ def render_and_save_nfts(input): if not os.path.exists(animation_folder): os.makedirs(animation_folder) - if input.animationFileFormat == "MP4": + if input.animation_file_format =="MP4": bpy.context.scene.render.filepath = animation_path bpy.context.scene.render.image_settings.file_format = "FFMPEG" @@ -360,25 +360,25 @@ def render_and_save_nfts(input): bpy.context.scene.render.ffmpeg.codec = 'H264' bpy.ops.render.render(animation=True) - elif input.animationFileFormat == 'PNG': + elif input.animation_file_format =='PNG': if not os.path.exists(animation_path): os.makedirs(animation_path) bpy.context.scene.render.filepath = os.path.join(animation_path, name) - bpy.context.scene.render.image_settings.file_format = input.animationFileFormat + bpy.context.scene.render.image_settings.file_format = input.animation_file_format 
bpy.ops.render.render(animation=True) - elif input.animationFileFormat == 'TIFF': + elif input.animation_file_format =='TIFF': if not os.path.exists(animation_path): os.makedirs(animation_path) bpy.context.scene.render.filepath = os.path.join(animation_path, name) - bpy.context.scene.render.image_settings.file_format = input.animationFileFormat + bpy.context.scene.render.image_settings.file_format = input.animation_file_format bpy.ops.render.render(animation=True) else: bpy.context.scene.render.filepath = animation_path - bpy.context.scene.render.image_settings.file_format = input.animationFileFormat + bpy.context.scene.render.image_settings.file_format = input.animation_file_format bpy.ops.render.render(animation=True) # Loading Animation: @@ -393,7 +393,7 @@ def render_and_save_nfts(input): f"\n{TextColors.RESET}" ) - if input.enableModelsBlender: + if input.enable_models: print(f"{TextColors.OK}-------- 3D Model --------{TextColors.RESET}") model_generation_time_start = time.time() @@ -419,7 +419,7 @@ def render_and_save_nfts(input): # if obj.name in remove_objects: # obj.select_set(False) - if input.modelFileFormat == 'GLB': + if input.model_file_format =='GLB': check_failed_exists(f"{model_path}.glb") bpy.ops.export_scene.gltf( filepath=f"{model_path}.glb", @@ -428,7 +428,7 @@ def render_and_save_nfts(input): export_keep_originals=True, use_selection=True ) - if input.modelFileFormat == 'GLTF_SEPARATE': + if input.model_file_format =='GLTF_SEPARATE': check_failed_exists(f"{model_path}.gltf") check_failed_exists(f"{model_path}.bin") bpy.ops.export_scene.gltf( @@ -438,7 +438,7 @@ def render_and_save_nfts(input): export_keep_originals=True, use_selection=True ) - if input.modelFileFormat == 'GLTF_EMBEDDED': + if input.model_file_format =='GLTF_EMBEDDED': check_failed_exists(f"{model_path}.gltf") bpy.ops.export_scene.gltf( filepath=f"{model_path}.gltf", @@ -447,35 +447,35 @@ def render_and_save_nfts(input): export_keep_originals=True, use_selection=True ) - elif 
input.modelFileFormat == 'FBX': + elif input.model_file_format =='FBX': check_failed_exists(f"{model_path}.fbx") bpy.ops.export_scene.fbx( filepath=f"{model_path}.fbx", check_existing=True, use_selection=True ) - elif input.modelFileFormat == 'OBJ': + elif input.model_file_format =='OBJ': check_failed_exists(f"{model_path}.obj") bpy.ops.export_scene.obj( filepath=f"{model_path}.obj", check_existing=True, use_selection=True, ) - elif input.modelFileFormat == 'X3D': + elif input.model_file_format =='X3D': check_failed_exists(f"{model_path}.x3d") bpy.ops.export_scene.x3d( filepath=f"{model_path}.x3d", check_existing=True, use_selection=True ) - elif input.modelFileFormat == 'STL': + elif input.model_file_format =='STL': check_failed_exists(f"{model_path}.stl") bpy.ops.export_mesh.stl( filepath=f"{model_path}.stl", check_existing=True, use_selection=True ) - elif input.modelFileFormat == 'VOX': + elif input.model_file_format =='VOX': check_failed_exists(f"{model_path}.vox") bpy.ops.export_vox.some_data(filepath=f"{model_path}.vox") @@ -492,22 +492,22 @@ def render_and_save_nfts(input): ) # Generating Metadata: - if input.cardanoMetaDataBool: + if input.cardano_metadata_bool: if not os.path.exists(cardano_metadata_path): os.makedirs(cardano_metadata_path) - createCardanoMetadata( + create_cardano_metadata( name, order_num, full_single_dna, dna_dictionary, metadata_material_dict, - input.custom_Fields, - input.enableCustomFields, + input.custom_fields, + input.enable_custom_fields, input.cardano_description, cardano_metadata_path ) - if input.solanaMetaDataBool: + if input.solana_metadata_bool: if not os.path.exists(solana_metadata_path): os.makedirs(solana_metadata_path) createSolanaMetaData( @@ -516,23 +516,23 @@ def render_and_save_nfts(input): full_single_dna, dna_dictionary, metadata_material_dict, - input.custom_Fields, - input.enableCustomFields, + input.custom_fields, + input.enable_custom_fields, input.solana_description, solana_metadata_path ) - if 
input.erc721MetaData: + if input.erc721_metadata: if not os.path.exists(erc721_metadata_path): os.makedirs(erc721_metadata_path) - createErc721MetaData( + create_erc721_meta_data( name, order_num, full_single_dna, dna_dictionary, metadata_material_dict, - input.custom_Fields, - input.enableCustomFields, + input.custom_fields, + input.enable_custom_fields, input.erc721_description, erc721_metadata_path ) @@ -558,7 +558,7 @@ def render_and_save_nfts(input): print(f"Completed {name} render in {time.time() - time_start_2}s") - save_completed(full_single_dna, a, x, input.batch_json_save_path, input.batchToGenerate) + save_completed(full_single_dna, a, x, input.batch_json_save_path, input.batch_to_generate) x += 1 @@ -569,17 +569,17 @@ def render_and_save_nfts(input): batch_complete_time = time.time() - time_start_1 - print(f"\nAll NFTs successfully generated and sent to {input.nftBatch_save_path}" - f"\nCompleted all renders in Batch{input.batchToGenerate}.json in {batch_complete_time}s\n") + print(f"\nAll NFTs successfully generated and sent to {input.nft_batch_save_path}" + f"\nCompleted all renders in Batch{input.batch_to_generate}.json in {batch_complete_time}s\n") batch_info = {"Batch Render Time": batch_complete_time, "Number of NFTs generated in Batch": x - 1, "Average time per generation": batch_complete_time / x - 1} - batch_info_folder = os.path.join(input.nftBatch_save_path, "Batch" + str(input.batchToGenerate), "batch_info.json") + batch_info_folder = os.path.join(input.nft_batch_save_path, "Batch" + str(input.batch_to_generate), "batch_info.json") save_batch(batch_info, batch_info_folder) # Send Email that Batch is complete: - if input.emailNotificationBool: + if input.email_notification_bool: port = 465 # For SSL smtp_server = "smtp.gmail.com" sender_email = input.sender_from # Enter your address @@ -592,9 +592,9 @@ def render_and_save_nfts(input): batch_data = get_batch_data(input.failed_batch, input.batch_json_save_path) else: - batch_data = 
get_batch_data(input.batchToGenerate, input.batch_json_save_path) + batch_data = get_batch_data(input.batch_to_generate, input.batch_json_save_path) - batch = input.batchToGenerate + batch = input.batch_to_generate generation_time = str(datetime.timedelta(seconds=batch_complete_time)) @@ -627,11 +627,11 @@ def render_and_save_nfts(input): if plateform == "Darwin": os.system(f"shutdown /s /t {time}") - if input.enableAutoShutdown and not input.specify_timeBool: + if input.enable_auto_shutdown and not input.specify_time_bool: shutdown(0) # If user selects automatic shutdown and specify time after Batch completion - if input.enableAutoShutdown and input.specify_timeBool: + if input.enable_auto_shutdown and input.specify_time_bool: hours = (int(input.hours) / 60) / 60 minutes = int(input.minutes) / 60 total_sleep_time = hours + minutes diff --git a/main/headless_util.py b/main/headless_util.py index 6c4b283..dbe0f6e 100644 --- a/main/headless_util.py +++ b/main/headless_util.py @@ -5,7 +5,8 @@ import sys import argparse -def getPythonArgs(): + +def get_python_args(): argv = sys.argv @@ -15,7 +16,7 @@ def getPythonArgs(): argv = argv[argv.index("--") + 1:] # get all args after "--" usage_text = ( - "Run Blend_My_NFTs headlessly from the command line\n" + "Run Blend_My_NFTs headlessly from the command line\n" "usage:\n" "blender -background --python -- --config-file " ) @@ -64,4 +65,4 @@ def getPythonArgs(): help="Overwrite the logic file path in the config file" ) - return (parser.parse_args(argv), parser) \ No newline at end of file + return parser.parse_args(argv), parser diff --git a/main/helpers.py b/main/helpers.py index e606bca..870adfa 100644 --- a/main/helpers.py +++ b/main/helpers.py @@ -408,7 +408,7 @@ def raise_error_num_batches_greater_then(num_batches): # Raise Warnings: def raise_warning_max_nfts(nfts_per_batch, collection_size): """ - Prints warning if nftsPerBatch is greater than collectionSize. 
+ Prints warning if nfts_per_batch is greater than collection_size. """ if nfts_per_batch > collection_size: @@ -421,7 +421,7 @@ def raise_warning_max_nfts(nfts_per_batch, collection_size): def raise_warning_collection_size(dna_list, collection_size): """ - Prints warning if BMNFTs cannot generate requested number of NFTs from a given collectionSize. + Prints warning if BMNFTs cannot generate requested number of NFTs from a given collection_size. """ if len(dna_list) < collection_size: diff --git a/main/intermediate.py b/main/intermediate.py index 323bf56..ae0eece 100644 --- a/main/intermediate.py +++ b/main/intermediate.py @@ -1,21 +1,23 @@ -import json import bpy +import json from main import dna_generator, exporter +# TODO: migrate this code to the exporter.py to simplify render process into one file. -def send_To_Record_JSON(input, reverse_order=False): - if input.enableLogic: - if input.enable_Logic_Json and input.logicFile: - input.logicFile = json.load(open(input.logicFile)) - if input.enable_Logic_Json and not input.logicFile: +def send_to_record(input, reverse_order=False): + if input.enable_logic: + if input.enable_logic_json and input.logic_file: + input.logic_file = json.load(open(input.logic_file)) + + if input.enable_logic_json and not input.logic_file: print({'ERROR'}, f"No Logic.json file path set. 
Please set the file path to your Logic.json file.") - if not input.enable_Logic_Json: + if not input.enable_logic_json: scn = bpy.context.scene if reverse_order: - input.logicFile = {} + input.logic_file = {} num = 1 for i in range(scn.logic_fields_index, -1, -1): item = scn.logic_fields[i] @@ -23,20 +25,20 @@ def send_To_Record_JSON(input, reverse_order=False): item_list1 = item.item_list1 rule_type = item.rule_type item_list2 = item.item_list2 - input.logicFile[f"Rule-{num}"] = { + input.logic_file[f"Rule-{num}"] = { "IF": item_list1.split(','), rule_type: item_list2.split(',') } print(rule_type) num += 1 else: - input.logicFile = {} + input.logic_file = {} num = 1 for item in scn.logic_fields: item_list1 = item.item_list1 rule_type = item.rule_type item_list2 = item.item_list2 - input.logicFile[f"Rule-{num}"] = { + input.logic_file[f"Rule-{num}"] = { "IF": item_list1.split(','), rule_type: item_list2.split(',') } @@ -44,37 +46,42 @@ def send_To_Record_JSON(input, reverse_order=False): num += 1 - dna_generator.send_to_record(input.collectionSize, - input.nftsPerBatch, - input.save_path, - input.enableRarity, - input.enableLogic, - input.logicFile, - input.enableMaterials, - input.materialsFile, - input.Blend_My_NFTs_Output, - input.batch_json_save_path, - input.enable_debug, - ) + dna_generator.send_to_record( + input.collection_size, + input.nfts_per_batch, + input.save_path, + input.enable_rarity, + input.enable_logic, + input.logic_file, + input.enable_materials, + input.materials_file, + input.blend_my_nfts_output, + input.batch_json_save_path, + input.enable_debug, + ) -def render_and_save_NFTs(input, reverse_order=False): - if input.enableCustomFields: +def render_and_save_nfts(input, reverse_order=False): + if input.enable_custom_fields: scn = bpy.context.scene if reverse_order: for i in range(scn.custom_metadata_fields_index, -1, -1): item = scn.custom_metadata_fields[i] - if item.field_name in list(input.custom_Fields.keys()): + if item.field_name in 
list(input.custom_fields.keys()): raise ValueError( - f"A duplicate of '{item.field_name}' was found. Please ensure all Custom Metadata field Names are unique.") + f"A duplicate of '{item.field_name}' was found. Please ensure all Custom Metadata field Names " + f"are unique." + ) else: - input.custom_Fields[item.field_name] = item.field_value + input.custom_fields[item.field_name] = item.field_value else: for item in scn.custom_metadata_fields: - if item.field_name in list(input.custom_Fields.keys()): + if item.field_name in list(input.custom_fields.keys()): raise ValueError( - f"A duplicate of '{item.field_name}' was found. Please ensure all Custom Metadata field Names are unique.") + f"A duplicate of '{item.field_name}' was found. Please ensure all Custom Metadata field Names " + f"are unique." + ) else: - input.custom_Fields[item.field_name] = item.field_value + input.custom_fields[item.field_name] = item.field_value - Exporter.render_and_save_nfts(input) + exporter.render_and_save_nfts(input) diff --git a/main/logic.py b/main/logic.py index d76f6a4..7f2b0de 100644 --- a/main/logic.py +++ b/main/logic.py @@ -1,19 +1,19 @@ # Purpose: -# The purpose of this file is to add logic and rules to the DNA that are sent to the NFTRecord.json file in dna_generator.py +# The purpose of this file is to add logic and rules to the DNA that are sent to the NFTRecord.json file in +# dna_generator.py -import bpy import random import collections -from .helpers import TextColors, removeList, remove_file_by_extension, save_result +from .helpers import TextColors -def reconstructDNA(deconstructedDNA): - reconstructed_DNA = "" - for a in deconstructedDNA: +def reconstruct_dna(deconstructed_dna): + reconstructed_dna = "" + for a in deconstructed_dna: num = "-" + str(a) - reconstructed_DNA += num - return ''.join(reconstructed_DNA.split('-', 1)) + reconstructed_dna += num + return ''.join(reconstructed_dna.split('-', 1)) def get_var_info(variant, hierarchy): @@ -33,11 +33,11 @@ def 
get_var_info(variant, hierarchy): return [name, order_number, rarity_number, attribute, attribute_index] # list of Var info sent back -def apply_rules_to_dna(hierarchy, deconstructed_DNA, if_dict, result_dict, result_dict_type, enableRarity): - # Check if Variants in if_dict are in deconstructed_DNA, if so return if_list_selected = True: +def apply_rules_to_dna(hierarchy, deconstructed_dna, if_dict, result_dict, result_dict_type, enable_rarity): + # Check if Variants in if_dict are in deconstructed_dna, if so return if_list_selected = True: if_list_selected = False - for a in deconstructed_DNA: - attribute_index = deconstructed_DNA.index(a) + for a in deconstructed_dna: + attribute_index = deconstructed_dna.index(a) attribute = list(hierarchy.keys())[attribute_index] for b in hierarchy[attribute]: @@ -49,23 +49,23 @@ def apply_rules_to_dna(hierarchy, deconstructed_DNA, if_dict, result_dict, resul if_list_selected = True # Apply changes in accordance to Variants in 'result_dict' and 'if_list_selected' bool above: - for a in deconstructed_DNA: - attribute_index = deconstructed_DNA.index(a) + for a in deconstructed_dna: + attribute_index = deconstructed_dna.index(a) attribute = list(hierarchy.keys())[attribute_index] if attribute in result_dict: # Check if Attribute from DNA is in 'result_dict' # If 'a' is a full Attribute and Variants in if_dict not selected, set 'a' to empty (0): if list(result_dict[attribute].keys()) == list(hierarchy[attribute].keys()) and not if_list_selected: - deconstructed_DNA[attribute_index] = "0" + deconstructed_dna[attribute_index] = "0" # If 'a' is a full Attribute and result_dict_type = "NOT", set 'a' to empty (0): if list(result_dict[attribute].keys()) == list( hierarchy[attribute].keys()) and if_list_selected and result_dict_type == "NOT": - deconstructed_DNA[attribute_index] = "0" + deconstructed_dna[attribute_index] = "0" - # If Variants in if_dict are selected, set each attribute in 'result_dict' to a random or rarity selected 
Variant from - # 'result_dict[attribute]' variant_list: + # If Variants in if_dict are selected, set each attribute in 'result_dict' to a random or rarity selected Variant + # from 'result_dict[attribute]' variant_list: if if_list_selected: # Invert 'items_returned' if 'NOT' rule is selected: @@ -91,60 +91,60 @@ def apply_rules_to_dna(hierarchy, deconstructed_DNA, if_dict, result_dict, resul if attribute in result_dict: # Check if Attribute from DNA is in 'then_dict' - number_List_Of_i = [] - rarity_List_Of_i = [] - ifZeroBool = None - variantNum = None + number_list_of_i = [] + rarity_list_of_i = [] + if_zero_bool = None + variant_num = None for b in variant_list: number = b.split("_")[1] rarity = b.split("_")[2] - number_List_Of_i.append(int(number)) - rarity_List_Of_i.append(float(rarity)) + number_list_of_i.append(int(number)) + rarity_list_of_i.append(float(rarity)) - for b in rarity_List_Of_i: + for b in rarity_list_of_i: if b == 0: - ifZeroBool = True + if_zero_bool = True elif b != 0: - ifZeroBool = False + if_zero_bool = False - if enableRarity: + if enable_rarity: try: - if ifZeroBool: - variantNum = random.choices(number_List_Of_i, k=1) - elif not ifZeroBool: - variantNum = random.choices(number_List_Of_i, weights=rarity_List_Of_i, k=1) + if if_zero_bool: + variant_num = random.choices(number_list_of_i, k=1) + elif not if_zero_bool: + variant_num = random.choices(number_list_of_i, weights=rarity_list_of_i, k=1) except IndexError: raise IndexError( f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" - f"An issue was found within the Attribute collection '{a}'. For more information on Blend_My_NFTs compatible scenes, " - f"see:\n{TextColors.RESET}" + f"An issue was found within the Attribute collection '{a}'. 
For more information on " + f"Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}" f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) else: try: - variantNum = random.choices(number_List_Of_i, k=1) + variant_num = random.choices(number_list_of_i, k=1) except IndexError: raise IndexError( f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" - f"An issue was found within the Attribute collection '{a}'. For more information on Blend_My_NFTs compatible scenes, " - f"see:\n{TextColors.RESET}" + f"An issue was found within the Attribute collection '{a}'. For more information on " + f"Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}" f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) - deconstructed_DNA[int(attribute_index)] = str(variantNum[0]) + deconstructed_dna[int(attribute_index)] = str(variant_num[0]) - return deconstructed_DNA + return deconstructed_dna -def get_rule_break_type(hierarchy, deconstructed_DNA, if_dict, result_dict, result_dict_type): - # Check if Variants in 'if_dict' found in deconstructed_DNA: - if_bool = False # True if Variant in 'deconstructed_DNA' found in 'if_dict' +def get_rule_break_type(hierarchy, deconstructed_dna, if_dict, result_dict, result_dict_type): + # Check if Variants in 'if_dict' found in deconstructed_dna: + if_bool = False # True if Variant in 'deconstructed_dna' found in 'if_dict' for a in if_dict: # Attribute in 'if_dict' for b in if_dict[a]: # Variant in if_dict[Attribute] var_order_num = str(if_dict[a][b][1]) # Order number of 'b' (Variant) dna_order_num = str( - deconstructed_DNA[if_dict[a][b][4]]) # Order Number of 'b's attribute in deconstructed_DNA + deconstructed_dna[if_dict[a][b][4]]) # Order Number of 'b's attribute in deconstructed_dna if var_order_num == dna_order_num: # If DNA selected Variants found inside IF list variants: if_bool = True @@ -153,14 +153,14 @@ def get_rule_break_type(hierarchy, deconstructed_DNA, if_dict, 
result_dict, resu continue break - # Check if Variants in 'result_dict' found in deconstructed_DNA: + # Check if Variants in 'result_dict' found in deconstructed_dna: full_att_bool = False - result_bool = False # True if Variant in 'deconstructed_DNA' found in 'result_dict' + result_bool = False # True if Variant in 'deconstructed_dna' found in 'result_dict' for a in result_dict: # Attribute in 'result_dict' for b in result_dict[a]: # Variant in if_dict[Attribute] var_order_num = str(result_dict[a][b][1]) # Order number of 'b' (Variant) dna_order_num = str( - deconstructed_DNA[result_dict[a][b][4]]) # Order Number of 'b's attribute in deconstructed_DNA + deconstructed_dna[result_dict[a][b][4]]) # Order Number of 'b's attribute in deconstructed_dna if var_order_num == dna_order_num: # If DNA selected Variants found inside THEN list variants: if list(result_dict[a].keys()) == list(hierarchy[a].keys()): full_att_bool = True @@ -173,20 +173,20 @@ def get_rule_break_type(hierarchy, deconstructed_DNA, if_dict, result_dict, resu # Rule Bool return summary: violates_rule = False - # If Variants in 'if_dict' found in deconstructed_DNA and Variants in 'result_dict' not found in deconstructed_DNA: + # If Variants in 'if_dict' found in deconstructed_dna and Variants in 'result_dict' not found in deconstructed_dna: if if_bool and not result_bool: violates_rule = True elif if_bool and result_bool and result_dict_type == "NOT": violates_rule = True - # If Variants in 'if_dict' not found in deconstructed_DNA, and 'result_dict' variants are found in deconstructed_DNA, - # and they are a part of a full Attribute in 'then_dict' + # If Variants in 'if_dict' not found in deconstructed_dna, and 'result_dict' variants are found in + # deconstructed_dna, and they are a part of a full Attribute in 'then_dict' elif not if_bool and result_bool and full_att_bool: violates_rule = True - # If Variants in 'if_dict' not found in deconstructed_DNA, but Variants in 'then_dict' are found in 
deconstructed_DNA, - # and don't make up a full Attribute: + # If Variants in 'if_dict' not found in deconstructed_dna, but Variants in 'then_dict' are found in + # deconstructed_dna, and don't make up a full Attribute: # elif not if_bool and result_bool and not full_att_bool: # violates_rule = False @@ -252,41 +252,50 @@ def create_dicts(hierarchy, rule_list_items, result_dict_type): return dict(items_returned) -def logicafyDNAsingle(hierarchy, singleDNA, logicFile, enableRarity, enableMaterials): - deconstructed_DNA = singleDNA.split("-") - didReconstruct = True - originalDNA = str(singleDNA) +def logicafy_dna_single(hierarchy, single_dna, logic_file, enable_rarity): + deconstructed_dna = single_dna.split("-") + did_reconstruct = True + original_dna = str(single_dna) - while didReconstruct: - didReconstruct = False - for rule in logicFile: + while did_reconstruct: + did_reconstruct = False + for rule in logic_file: # Items from 'IF' key for a given rule - if_dict = create_dicts(hierarchy, logicFile[rule]["IF"], "IF") + if_dict = create_dicts(hierarchy, logic_file[rule]["IF"], "IF") result_dict_type = "" - if "THEN" in logicFile[rule]: + if "THEN" in logic_file[rule]: result_dict_type = "THEN" - if "NOT" in logicFile[rule]: + if "NOT" in logic_file[rule]: result_dict_type = "NOT" - result_dict = create_dicts(hierarchy, logicFile[rule][result_dict_type], result_dict_type) + result_dict = create_dicts(hierarchy, logic_file[rule][result_dict_type], result_dict_type) # Change 'then_bool' to 'result_bool' - violates_rule, if_bool, then_bool, full_att_bool = get_rule_break_type(hierarchy, deconstructed_DNA, - if_dict, result_dict, - result_dict_type) + violates_rule, if_bool, then_bool, full_att_bool = get_rule_break_type( + hierarchy, + deconstructed_dna, + if_dict, + result_dict, + result_dict_type, + ) if violates_rule: - # print(f"======={deconstructed_DNA} VIOLATES RULE======") + # print(f"======={deconstructed_dna} VIOLATES RULE======") - deconstructed_DNA = 
apply_rules_to_dna( - hierarchy, deconstructed_DNA, if_dict, result_dict, result_dict_type, enableRarity + deconstructed_dna = apply_rules_to_dna( + hierarchy, + deconstructed_dna, + if_dict, + result_dict, + result_dict_type, + enable_rarity ) - newDNA = reconstructDNA(deconstructed_DNA) - if newDNA != originalDNA: - originalDNA = str(newDNA) - didReconstruct = True + new_dna = reconstruct_dna(deconstructed_dna) + if new_dna != original_dna: + original_dna = str(new_dna) + did_reconstruct = True break - return str(reconstructDNA(deconstructed_DNA)) + return str(reconstruct_dna(deconstructed_dna)) diff --git a/main/material_generator.py b/main/material_generator.py index e0eda71..5dc2d89 100644 --- a/main/material_generator.py +++ b/main/material_generator.py @@ -3,62 +3,61 @@ # also specified in the .json file. The Materialized DNA is then returned in the following format: 1-1-1:1-1-1 # Where the numbers right of the ":" are the material numbers applied to the respective Variants to the left of the ":" -import bpy - import json import random from .helpers import TextColors -def select_material(materialList, variant, enableRarity): +def select_material(material_list, variant, enable_rarity): """Selects a material from a passed material list. """ - material_List_Of_i = [] # List of Material names instead of order numbers - rarity_List_Of_i = [] - ifZeroBool = None + material_list_of_i = [] # List of Material names instead of order numbers + rarity_list_of_i = [] + if_zero_bool = None - for material in materialList: + for material in material_list: # Material Order Number comes from index in the Material List in materials.json for a given Variant. 
- # material_order_num = list(materialList.keys()).index(material) + # material_order_num = list(material_list.keys()).index(material) - material_List_Of_i.append(material) + material_list_of_i.append(material) - material_rarity_percent = materialList[material] - rarity_List_Of_i.append(float(material_rarity_percent)) + material_rarity_percent = material_list[material] + rarity_list_of_i.append(float(material_rarity_percent)) - # print(f"MATERIAL_LIST_OF_I:{material_List_Of_i}") - # print(f"RARITY_LIST_OF_I:{rarity_List_Of_i}") + # print(f"MATERIAL_LIST_OF_I:{material_list_of_i}") + # print(f"RARITY_LIST_OF_I:{rarity_list_of_i}") - for b in rarity_List_Of_i: + for b in rarity_list_of_i: if b == 0: - ifZeroBool = True + if_zero_bool = True elif b != 0: - ifZeroBool = False + if_zero_bool = False - if enableRarity: + if enable_rarity: try: - if ifZeroBool: - selected_material = random.choices(material_List_Of_i, k=1) - elif not ifZeroBool: - selected_material = random.choices(material_List_Of_i, weights=rarity_List_Of_i, k=1) + if if_zero_bool: + selected_material = random.choices(material_list_of_i, k=1) + elif not if_zero_bool: + selected_material = random.choices(material_list_of_i, weights=rarity_list_of_i, k=1) except IndexError: raise IndexError( f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" - f"An issue was found within the Material List of the Variant collection '{variant}'. For more information on Blend_My_NFTs compatible scenes, " - f"see:\n{TextColors.RESET}" + f"An issue was found within the Material List of the Variant collection '{variant}'. 
For more " + f"information on Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}" f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) else: try: - selected_material = random.choices(material_List_Of_i, k=1) + selected_material = random.choices(material_list_of_i, k=1) except IndexError: raise IndexError( f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" - f"An issue was found within the Material List of the Variant collection '{variant}'. For more information on Blend_My_NFTs compatible scenes, " - f"see:\n{TextColors.RESET}" + f"An issue was found within the Material List of the Variant collection '{variant}'. For more " + f"information on Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}" f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) - return selected_material[0], materialList + return selected_material[0], material_list + def get_variant_att_index(variant, hierarchy): variant_attribute = None @@ -72,26 +71,28 @@ def get_variant_att_index(variant, hierarchy): variant_order_num = variant.split("_")[1] return attribute_index, variant_order_num -def match_DNA_to_Variant(hierarchy, singleDNA): + +def match_dna_to_variant(hierarchy, single_dna): """ Matches each DNA number separated by "-" to its attribute, then its variant. 
""" - listAttributes = list(hierarchy.keys()) - listDnaDecunstructed = singleDNA.split('-') - dnaDictionary = {} + list_attributes = list(hierarchy.keys()) + list_dna_decunstructed = single_dna.split('-') + dna_dictionary = {} - for i, j in zip(listAttributes, listDnaDecunstructed): - dnaDictionary[i] = j + for i, j in zip(list_attributes, list_dna_decunstructed): + dna_dictionary[i] = j - for x in dnaDictionary: + for x in dna_dictionary: for k in hierarchy[x]: - kNum = hierarchy[x][k]["number"] - if kNum == dnaDictionary[x]: - dnaDictionary.update({x: k}) - return dnaDictionary + k_num = hierarchy[x][k]["number"] + if k_num == dna_dictionary[x]: + dna_dictionary.update({x: k}) + return dna_dictionary -def apply_materials(hierarchy, singleDNA, materialsFile, enableRarity): + +def apply_materials(hierarchy, single_dna, materials_file, enable_rarity): """ DNA with applied material example: "1-1:1-1" : @@ -99,20 +100,23 @@ def apply_materials(hierarchy, singleDNA, materialsFile, enableRarity): list in the Variant_Material.json file. 
""" - singleDNADict = match_DNA_to_Variant(hierarchy, singleDNA) - materialsFile = json.load(open(materialsFile)) - deconstructed_MaterialDNA = {} + single_dna_dict = match_dna_to_variant(hierarchy, single_dna) + materials_file = json.load(open(materials_file)) + deconstructed_material_dna = {} - for a in singleDNADict: + for a in single_dna_dict: complete = False - for b in materialsFile: - if singleDNADict[a] == b: - material_name, materialList, = select_material(materialsFile[b]['Material List'], b, enableRarity) - material_order_num = list(materialList.keys()).index(material_name) # Gets the Order Number of the Material - deconstructed_MaterialDNA[a] = str(material_order_num + 1) + for b in materials_file: + if single_dna_dict[a] == b: + material_name, material_list, = select_material(materials_file[b]['Material List'], b, enable_rarity) + + # Gets the Order Number of the Material + material_order_num = list(material_list.keys()).index(material_name) + + deconstructed_material_dna[a] = str(material_order_num + 1) complete = True if not complete: - deconstructed_MaterialDNA[a] = "0" + deconstructed_material_dna[a] = "0" # This section is now incorrect and needs updating: @@ -121,14 +125,14 @@ def apply_materials(hierarchy, singleDNA, materialsFile, enableRarity): # Attribute 'B' = 1, 'C' = 2, 'D' = 3, etc. 
For each pair you want to equal another, add its number it to this list: # synced_material_attributes = [1, 2] # - # first_mat = deconstructed_MaterialDNA[synced_material_attributes[0]] + # first_mat = deconstructed_material_dna[synced_material_attributes[0]] # for i in synced_material_attributes: - # deconstructed_MaterialDNA[i] = first_mat + # deconstructed_material_dna[i] = first_mat - material_DNA = "" - for a in deconstructed_MaterialDNA: - num = "-" + str(deconstructed_MaterialDNA[a]) - material_DNA += num - material_DNA = ''.join(material_DNA.split('-', 1)) + material_dna = "" + for a in deconstructed_material_dna: + num = "-" + str(deconstructed_material_dna[a]) + material_dna += num + material_dna = ''.join(material_dna.split('-', 1)) - return f"{singleDNA}:{material_DNA}" \ No newline at end of file + return f"{single_dna}:{material_dna}" diff --git a/main/metadata_templates.py b/main/metadata_templates.py index 2d7b8ca..1aa6210 100644 --- a/main/metadata_templates.py +++ b/main/metadata_templates.py @@ -3,26 +3,37 @@ # https://discord.gg/QTT7dzcuVs # Purpose: -# This file returns the specified meta data format to the exporter.py for a given NFT DNA. +# This file returns the specified metadata format to the exporter.py for a given NFT DNA. 
-import bpy import os import json -def sendMetaDataToJson(metaDataDict, save_path, file_name): - jsonMetaData = json.dumps(metaDataDict, indent=1, ensure_ascii=True) - with open(os.path.join(save_path, f"{file_name}.json"), 'w') as outfile: - outfile.write(jsonMetaData + '\n') -def stripNums(variant): +def send_metadata_to_json(meta_data_dict, save_path, file_name): + json_metadata = json.dumps(meta_data_dict, indent=1, ensure_ascii=True) + with open(os.path.join(save_path, f"{file_name}.json"), 'w') as outfile: + outfile.write(json_metadata + '\n') + + +def strip_nums(variant): variant = str(variant).split('_')[0] return variant -# Cardano Template -def createCardanoMetadata(name, Order_Num, NFT_DNA, NFT_Variants, Material_Attributes, - custom_Fields, enableCustomFields, cardano_description, cardanoMetadataPath): - metaDataDictCardano = {"721": { +# Cardano Template +def create_cardano_metadata( + name, + order_num, + nft_dna, + nft_variants, + material_attributes, + custom_fields, + enable_custom_fields, + cardano_description, + cardano_metadata_path +): + + meta_data_dict_cardano = {"721": { "": { name: { "name": name, @@ -35,73 +46,107 @@ def createCardanoMetadata(name, Order_Num, NFT_DNA, NFT_Variants, Material_Attri }} # Variants and Attributes: - for i in NFT_Variants: - metaDataDictCardano["721"][""][name][i] = stripNums(NFT_Variants[i]) + for i in nft_variants: + meta_data_dict_cardano["721"][""][name][i] = strip_nums(nft_variants[i]) # Material Variants and Attributes: - for i in Material_Attributes: - metaDataDictCardano["721"][""][name][i] = Material_Attributes[i] + for i in material_attributes: + meta_data_dict_cardano["721"][""][name][i] = material_attributes[i] # Custom Fields: - if enableCustomFields: - for i in custom_Fields: - metaDataDictCardano["721"][""][name][i] = custom_Fields[i] + if enable_custom_fields: + for i in custom_fields: + meta_data_dict_cardano["721"][""][name][i] = custom_fields[i] - sendMetaDataToJson(metaDataDictCardano, 
cardanoMetadataPath, name) + send_metadata_to_json( + meta_data_dict_cardano, + cardano_metadata_path, + name + ) # Solana Template -def createSolanaMetaData(name, Order_Num, NFT_DNA, NFT_Variants, Material_Attributes, custom_Fields, enableCustomFields, - solana_description, solanaMetadataPath): - metaDataDictSolana = {"name": name, "symbol": "", "description": solana_description, "seller_fee_basis_points": None, - "image": "", "animation_url": "", "external_url": ""} +def createSolanaMetaData( + name, + order_num, + nft_dna, + nft_variants, + material_attributes, + custom_fields, + enable_custom_fields, + solana_description, + solana_metadata_path +): + metadata_dict_solana = { + "name": name, + "symbol": "", + "description": solana_description, + "seller_fee_basis_points": None, + "image": "", + "animation_url": "", + "external_url": "" + } attributes = [] # Variant and Attributes: - for i in NFT_Variants: + for i in nft_variants: dictionary = { "trait_type": i, - "value": stripNums(NFT_Variants[i]) + "value": strip_nums(nft_variants[i]) } attributes.append(dictionary) # Material Variants and Attributes: - for i in Material_Attributes: + for i in material_attributes: dictionary = { "trait_type": i, - "value": Material_Attributes[i] + "value": material_attributes[i] } attributes.append(dictionary) # Custom Fields: - if enableCustomFields: - for i in custom_Fields: + if enable_custom_fields: + for i in custom_fields: dictionary = { "trait_type": i, - "value": custom_Fields[i] + "value": custom_fields[i] } attributes.append(dictionary) - metaDataDictSolana["attributes"] = attributes - metaDataDictSolana["collection"] = { + metadata_dict_solana["attributes"] = attributes + metadata_dict_solana["collection"] = { "name": "", "family": "" } - metaDataDictSolana["properties"] = { + metadata_dict_solana["properties"] = { "files": [{"uri": "", "type": ""}], "category": "", "creators": [{"address": "", "share": None}] } - sendMetaDataToJson(metaDataDictSolana, 
solanaMetadataPath, name) + send_metadata_to_json( + metadata_dict_solana, + solana_metadata_path, + name + ) # ERC721 Template -def createErc721MetaData(name, Order_Num, NFT_DNA, NFT_Variants, Material_Attributes, custom_Fields, enableCustomFields, - erc721_description, erc721MetadataPath): - metaDataDictErc721 = { +def create_erc721_meta_data( + name, + order_num, + nft_dna, + nft_variants, + material_attributes, + custom_fields, + enable_custom_fields, + erc721_description, + erc721_metadata_path +): + + metadata_dict_erc721 = { "name": name, "description": erc721_description, "image": "", @@ -111,33 +156,36 @@ def createErc721MetaData(name, Order_Num, NFT_DNA, NFT_Variants, Material_Attrib attributes = [] # Variants and Attributes: - for i in NFT_Variants: + for i in nft_variants: dictionary = { "trait_type": i, - "value": stripNums(NFT_Variants[i]) + "value": strip_nums(nft_variants[i]) } attributes.append(dictionary) # Material Variants and Attributes: - for i in Material_Attributes: + for i in material_attributes: dictionary = { "trait_type": i, - "value": Material_Attributes[i] + "value": material_attributes[i] } attributes.append(dictionary) # Custom Fields: - if enableCustomFields: - for i in custom_Fields: + if enable_custom_fields: + for i in custom_fields: dictionary = { "trait_type": i, - "value": custom_Fields[i] + "value": custom_fields[i] } attributes.append(dictionary) - metaDataDictErc721["attributes"] = attributes - - sendMetaDataToJson(metaDataDictErc721, erc721MetadataPath, name) + metadata_dict_erc721["attributes"] = attributes + send_metadata_to_json( + metadata_dict_erc721, + erc721_metadata_path, + name + ) diff --git a/main/refactorer.py b/main/refactorer.py index d842e05..b29f1c7 100644 --- a/main/refactorer.py +++ b/main/refactorer.py @@ -1,48 +1,46 @@ # Purpose: # This file goes through all batches, renames, and sorts all nft files to a Complete_Collection folder in Blend_My_NFTs -import bpy import os import json import shutil -from 
.helpers import TextColors, removeList, remove_file_by_extension +from .helpers import remove_file_by_extension -def reformatNFTCollection(refactor_panel_input): - completeCollPath = os.path.join(refactor_panel_input.save_path, "Blend_My_NFTs Output", "Complete_Collection") +def reformat_nft_collection(refactor_panel_input): + complete_coll_path = os.path.join(refactor_panel_input.save_path, "Blend_My_NFTs Output", "Complete_Collection") - if not os.path.exists(completeCollPath): - os.mkdir(completeCollPath) + if not os.path.exists(complete_coll_path): + os.mkdir(complete_coll_path) - batchListDirty = os.listdir(refactor_panel_input.nftBatch_save_path) - batchList = remove_file_by_extension(batchListDirty) + batch_list_dirty = os.listdir(refactor_panel_input.nft_batch_save_path) + batch_list = remove_file_by_extension(batch_list_dirty) collection_info = {"Total Time": 0} - for folder in batchList: - batch_info = json.load(open(os.path.join(refactor_panel_input.nftBatch_save_path, folder, "batch_info.json"))) + for folder in batch_list: + batch_info = json.load(open(os.path.join(refactor_panel_input.nft_batch_save_path, folder, "batch_info.json"))) collection_info[os.path.basename(folder)] = batch_info collection_info["Total Time"] = collection_info["Total Time"] + batch_info["Batch Render Time"] - fileListDirty = os.listdir(os.path.join(refactor_panel_input.nftBatch_save_path, folder)) - filelist = remove_file_by_extension(fileListDirty) + file_list_dirty = os.listdir(os.path.join(refactor_panel_input.nft_batch_save_path, folder)) + filelist = remove_file_by_extension(file_list_dirty) for mediaTypeFolder in filelist: if mediaTypeFolder != "batch_info.json": - mediaTypeFolderDir = os.path.join(refactor_panel_input.nftBatch_save_path, folder, mediaTypeFolder) + media_type_folder_dir = os.path.join(refactor_panel_input.nft_batch_save_path, folder, mediaTypeFolder) - for i in os.listdir(mediaTypeFolderDir): - destination = os.path.join(completeCollPath, 
mediaTypeFolder) + for i in os.listdir(media_type_folder_dir): + destination = os.path.join(complete_coll_path, mediaTypeFolder) if not os.path.exists(destination): os.makedirs(destination) - shutil.move(os.path.join(mediaTypeFolderDir, i), destination) + shutil.move(os.path.join(media_type_folder_dir, i), destination) collection_info = json.dumps(collection_info, indent=1, ensure_ascii=True) - with open(os.path.join(completeCollPath, "collection_info.json"), 'w') as outfile: + with open(os.path.join(complete_coll_path, "collection_info.json"), 'w') as outfile: outfile.write(collection_info + '\n') print(f"All NFT files stored and sorted to the Complete_Collection folder in {refactor_panel_input.save_path}") - shutil.rmtree(refactor_panel_input.nftBatch_save_path) - + shutil.rmtree(refactor_panel_input.nft_batch_save_path) From de079b94efc23c8501ed465a8cc832491e67ceb8 Mon Sep 17 00:00:00 2001 From: Torrin Leonard <82110564+torrinworx@users.noreply.github.com> Date: Wed, 24 Aug 2022 13:29:47 -0400 Subject: [PATCH 09/12] Resolving compatibility issues --- __init__.py | 1 + main/dna_generator.py | 4 ++-- main/exporter.py | 12 +++++++----- main/helpers.py | 2 +- main/logic.py | 1 + 5 files changed, 12 insertions(+), 8 deletions(-) diff --git a/__init__.py b/__init__.py index 640b43a..7c755fb 100644 --- a/__init__.py +++ b/__init__.py @@ -667,6 +667,7 @@ class ResumeFailedBatch(bpy.types.Operator): sender_from=render_settings["sender_from"], email_password=render_settings["email_password"], receiver_to=render_settings["receiver_to"], + enable_debug=render_settings["enable_debug"], fail_state=_fail_state, diff --git a/main/dna_generator.py b/main/dna_generator.py index 62a30c1..d98060d 100644 --- a/main/dna_generator.py +++ b/main/dna_generator.py @@ -110,7 +110,7 @@ def generate_nft_dna( # print(f"Rarity DNA: {single_dna}") if enable_logic: - single_dna = logic.logicafy_dna_single(hierarchy, single_dna, logic_file, enable_rarity, enable_materials) + single_dna = 
logic.logicafy_dna_single(hierarchy, single_dna, logic_file, enable_rarity) # print(f"Logic DNA: {single_dna}") if enable_materials: @@ -310,7 +310,7 @@ def send_to_record( print( f"\n{TextColors.OK}Blend_My_NFTs Success:\n" - f"{len(data_dictionary['DNAList'])} NFT DNA saved to {nft_record_save_path}. NFT DNA Successfully " + f"{len(data_dictionary['dna_list'])} NFT DNA saved to {nft_record_save_path}. NFT DNA Successfully " f"created.\n{TextColors.RESET}") except Exception: diff --git a/main/exporter.py b/main/exporter.py index 2a70c35..fa3d134 100644 --- a/main/exporter.py +++ b/main/exporter.py @@ -95,6 +95,8 @@ def save_generation_state(input): "email_password": input.email_password, "receiver_to": input.receiver_to, + "enable_debug": input.enable_debug, + "custom_fields": input.custom_fields, }, }) @@ -107,8 +109,8 @@ def save_completed(full_single_dna, a, x, batch_json_save_path, batch_to_generat file_name = os.path.join(batch_json_save_path, "Batch{}.json".format(batch_to_generate)) batch = json.load(open(file_name)) - index = batch["BatchDNAList"].index(a) - batch["BatchDNAList"][index][full_single_dna]["Complete"] = True + index = batch["batch_dna_list"].index(a) + batch["batch_dna_list"][index][full_single_dna]["complete"] = True batch["Generation Save"][-1]["DNA Generated"] = x save_batch(batch, file_name) @@ -315,7 +317,7 @@ def render_and_save_nfts(input): # Generation/Rendering: if input.enable_images: - print(f"{TextColors.OK}-------- Image --------{TextColors.RESET}") + print(f"\n{TextColors.OK}-------- Image --------{TextColors.RESET}") image_render_time_start = time.time() @@ -342,7 +344,7 @@ def render_and_save_nfts(input): ) if input.enable_animations: - print(f"{TextColors.OK}-------- Animation --------{TextColors.RESET}") + print(f"\n{TextColors.OK}-------- Animation --------{TextColors.RESET}") animation_render_time_start = time.time() @@ -394,7 +396,7 @@ def render_and_save_nfts(input): ) if input.enable_models: - 
print(f"{TextColors.OK}-------- 3D Model --------{TextColors.RESET}") + print(f"\n{TextColors.OK}-------- 3D Model --------{TextColors.RESET}") model_generation_time_start = time.time() diff --git a/main/helpers.py b/main/helpers.py index 870adfa..15e9f25 100644 --- a/main/helpers.py +++ b/main/helpers.py @@ -351,7 +351,7 @@ def check_failed_batches(batch_json_save_path): for i in batch_folders: batch = json.load(open(os.path.join(batch_json_save_path, i))) - nfts_in_batch = batch["NFTs_in_Batch"] + nfts_in_batch = batch["nfts_in_batch"] if "Generation Save" in batch: dna_generated = batch["Generation Save"][-1]["DNA Generated"] if dna_generated is not None and dna_generated < nfts_in_batch: diff --git a/main/logic.py b/main/logic.py index 7f2b0de..86dafc0 100644 --- a/main/logic.py +++ b/main/logic.py @@ -261,6 +261,7 @@ def logicafy_dna_single(hierarchy, single_dna, logic_file, enable_rarity): did_reconstruct = False for rule in logic_file: # Items from 'IF' key for a given rule + print(logic_file) if_dict = create_dicts(hierarchy, logic_file[rule]["IF"], "IF") result_dict_type = "" From 1c26cea5fc02b782c522d5a0a1516be84bf38d87 Mon Sep 17 00:00:00 2001 From: Torrin Leonard <82110564+torrinworx@users.noreply.github.com> Date: Thu, 25 Aug 2022 09:41:22 -0400 Subject: [PATCH 10/12] Trying to implement logging for debug --- __init__.py | 24 +++++++++++++++++++++--- main/dna_generator.py | 43 +++++++++++++++++++++++++++++-------------- main/exporter.py | 36 +++++++++++++++++++++--------------- main/helpers.py | 17 ----------------- main/intermediate.py | 4 +--- main/logic.py | 1 - 6 files changed, 72 insertions(+), 53 deletions(-) diff --git a/__init__.py b/__init__.py index 7c755fb..5655103 100644 --- a/__init__.py +++ b/__init__.py @@ -160,7 +160,9 @@ class BMNFTData: sender_from: str email_password: str receiver_to: str + enable_debug: bool + log_path: str custom_fields: dict = None fail_state: Any = False @@ -225,7 +227,9 @@ def getBMNFTData(): 
sender_from=bpy.context.scene.input_tool.sender_from, email_password=bpy.context.scene.input_tool.email_password, receiver_to=bpy.context.scene.input_tool.receiver_to, - enable_debug=bpy.context.scene.input_tool.enable_debug + + enable_debug=bpy.context.scene.input_tool.enable_debug, + log_path=bpy.context.scene.input_tool.log_path, ) return data @@ -540,6 +544,13 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): description="Allows you to run Blend_My_NFTs without generating any content files and includes more " "console information." ) + log_path: bpy.props.StringProperty( + name="Debug Log Path", + description="Path where BMNFT_Log.txt is located.", + default="", + maxlen=1024, + subtype="FILE_PATH" + ) # API Panel properties: api_key: bpy.props.StringProperty( @@ -669,6 +680,7 @@ class ResumeFailedBatch(bpy.types.Operator): receiver_to=render_settings["receiver_to"], enable_debug=render_settings["enable_debug"], + log_path=render_settings["log_path"], fail_state=_fail_state, failed_batch=_failed_batch, @@ -1069,6 +1081,8 @@ class BMNFTS_PT_Other(bpy.types.Panel): row = layout.row() row.prop(input_tool_scene, "enable_debug") + if bpy.context.scene.input_tool.enable_debug: + row.prop(input_tool_scene, "log_path") row = layout.row() @@ -1080,8 +1094,12 @@ class BMNFTS_PT_Other(bpy.types.Panel): icon='URL').url = "https://github.com/torrinworx/Blend_My_NFTs" row = layout.row() - row.operator("wm.url_open", text="YouTube Tutorials", - icon='URL').url = "https://www.youtube.com/watch?v=ygKJYz4BjRs&list=PLuVvzaanutXcYtWmPVKu2bx83EYNxLRsX" + row.operator( + "wm.url_open", + text="YouTube Tutorials", + icon='URL' + ).url = "https://www.youtube.com/watch?v=ygKJYz4BjRs&list=PLuVvzaanutXcYtWmPVKu2bx83EYNxLRsX" + row = layout.row() row.operator("wm.url_open", text="Join Our Discord Community!", icon='URL').url = "https://discord.gg/UpZt5Un57t" diff --git a/main/dna_generator.py b/main/dna_generator.py index d98060d..1b5314b 100644 --- a/main/dna_generator.py 
+++ b/main/dna_generator.py @@ -5,11 +5,19 @@ import os import time import json import random +import logging import traceback from functools import partial + from . import logic, material_generator, helpers from .helpers import TextColors +logging.basicConfig( + level=logging.INFO, + format='[%(levelname)s][%(asctime)s]\n%(message)s\n', + datefmt='%Y-%m-%d %H:%M:%S' +) + def generate_nft_dna( collection_size, @@ -18,7 +26,6 @@ def generate_nft_dna( logic_file, enable_materials, materials_file, - enable_debug ): """ Returns batchDataDictionary containing the number of NFT combinations, hierarchy, and the dna_list. @@ -95,7 +102,7 @@ def generate_nft_dna( single_dna = ''.join(single_dna.split('-', 1)) return single_dna - def singleCompleteDNA(): + def single_complete_dna(): """ This function applies Rarity and Logic to a single DNA created by createDNASingle() if Rarity or Logic specified """ @@ -103,21 +110,25 @@ def generate_nft_dna( single_dna = "" if not enable_rarity: single_dna = create_dna_random(hierarchy) - # print("============") - # print(f"Original DNA: {single_dna}") + logging.debug(f"============\nOriginal DNA: {single_dna}") + print("============") + print(f"Original DNA: {single_dna}") + if enable_rarity: single_dna = create_dna_rarity(hierarchy) - # print(f"Rarity DNA: {single_dna}") + logging.debug(f"Rarity DNA: {single_dna}") + print(f"Rarity DNA: {single_dna}") if enable_logic: single_dna = logic.logicafy_dna_single(hierarchy, single_dna, logic_file, enable_rarity) - # print(f"Logic DNA: {single_dna}") + logging.debug(f"Logic DNA: {single_dna}") + print(f"Logic DNA: {single_dna}") if enable_materials: single_dna = material_generator.apply_materials(hierarchy, single_dna, materials_file, enable_rarity) - # print(f"Materials DNA: {single_dna}") - - # print("============\n") + logging.debug(f"Materials DNA: {single_dna}\n============\n") + print(f"Materials DNA: {single_dna}") + print("============\n") return single_dna @@ -129,7 +140,7 @@ def 
generate_nft_dna( dna_set_return = set() for i in range(collection_size): - dna_push_to_list = partial(singleCompleteDNA) + dna_push_to_list = partial(single_complete_dna) dna_set_return |= {''.join([dna_push_to_list()]) for _ in range(collection_size - len(dna_set_return))} @@ -151,8 +162,6 @@ def generate_nft_dna( dna_list = create_dna_list() - # Messages: - helpers.raise_warning_collection_size(dna_list, collection_size) # Data stored in batchDataDictionary: @@ -234,7 +243,8 @@ def send_to_record( materials_file, blend_my_nfts_output, batch_json_save_path, - enable_debug + enable_debug, + log_path ): """ Creates NFTRecord.json file and sends "batch_data_dictionary" to it. NFTRecord.json is a permanent record of all DNA @@ -243,6 +253,12 @@ def send_to_record( repeat DNA. """ + if enable_debug: + logging.basicConfig( + filename=os.path.join(log_path, "BMNFTS_Log.txt"), + level=logging.DEBUG + ) + # Checking Scene is compatible with BMNFTs: helpers.check_scene() @@ -280,7 +296,6 @@ def send_to_record( logic_file, enable_materials, materials_file, - enable_debug, ) nft_record_save_path = os.path.join(blend_my_nfts_output, "NFTRecord.json") diff --git a/main/exporter.py b/main/exporter.py index fa3d134..e51e3b3 100644 --- a/main/exporter.py +++ b/main/exporter.py @@ -96,6 +96,7 @@ def save_generation_state(input): "receiver_to": input.receiver_to, "enable_debug": input.enable_debug, + "log_path": input.log_path, "custom_fields": input.custom_fields, }, @@ -258,9 +259,9 @@ def render_and_save_nfts(input): except KeyError: raise TypeError( f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" - f"The Collection '{j}' appears to be missing or has been renamed. If you made any changes to " - f"your .blend file scene, ensure you re-create your NFT Data so Blend_My_NFTs can read your " - f"scene. For more information see:{TextColors.RESET}" + f"The Collection '{j}' appears to be missing or has been renamed. 
If you made any changes " + f"to your .blend file scene, ensure you re-create your NFT Data so Blend_My_NFTs can read " + f"your scene. For more information see:{TextColors.RESET}" f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) @@ -354,7 +355,7 @@ def render_and_save_nfts(input): if not os.path.exists(animation_folder): os.makedirs(animation_folder) - if input.animation_file_format =="MP4": + if input.animation_file_format == 'MP4': bpy.context.scene.render.filepath = animation_path bpy.context.scene.render.image_settings.file_format = "FFMPEG" @@ -362,7 +363,7 @@ def render_and_save_nfts(input): bpy.context.scene.render.ffmpeg.codec = 'H264' bpy.ops.render.render(animation=True) - elif input.animation_file_format =='PNG': + elif input.animation_file_format == 'PNG': if not os.path.exists(animation_path): os.makedirs(animation_path) @@ -370,7 +371,7 @@ def render_and_save_nfts(input): bpy.context.scene.render.image_settings.file_format = input.animation_file_format bpy.ops.render.render(animation=True) - elif input.animation_file_format =='TIFF': + elif input.animation_file_format == 'TIFF': if not os.path.exists(animation_path): os.makedirs(animation_path) @@ -421,7 +422,7 @@ def render_and_save_nfts(input): # if obj.name in remove_objects: # obj.select_set(False) - if input.model_file_format =='GLB': + if input.model_file_format == 'GLB': check_failed_exists(f"{model_path}.glb") bpy.ops.export_scene.gltf( filepath=f"{model_path}.glb", @@ -430,7 +431,7 @@ def render_and_save_nfts(input): export_keep_originals=True, use_selection=True ) - if input.model_file_format =='GLTF_SEPARATE': + if input.model_file_format == 'GLTF_SEPARATE': check_failed_exists(f"{model_path}.gltf") check_failed_exists(f"{model_path}.bin") bpy.ops.export_scene.gltf( @@ -440,7 +441,7 @@ def render_and_save_nfts(input): export_keep_originals=True, use_selection=True ) - if input.model_file_format =='GLTF_EMBEDDED': + if input.model_file_format == 
'GLTF_EMBEDDED': check_failed_exists(f"{model_path}.gltf") bpy.ops.export_scene.gltf( filepath=f"{model_path}.gltf", @@ -449,35 +450,35 @@ def render_and_save_nfts(input): export_keep_originals=True, use_selection=True ) - elif input.model_file_format =='FBX': + elif input.model_file_format == 'FBX': check_failed_exists(f"{model_path}.fbx") bpy.ops.export_scene.fbx( filepath=f"{model_path}.fbx", check_existing=True, use_selection=True ) - elif input.model_file_format =='OBJ': + elif input.model_file_format == 'OBJ': check_failed_exists(f"{model_path}.obj") bpy.ops.export_scene.obj( filepath=f"{model_path}.obj", check_existing=True, use_selection=True, ) - elif input.model_file_format =='X3D': + elif input.model_file_format == 'X3D': check_failed_exists(f"{model_path}.x3d") bpy.ops.export_scene.x3d( filepath=f"{model_path}.x3d", check_existing=True, use_selection=True ) - elif input.model_file_format =='STL': + elif input.model_file_format == 'STL': check_failed_exists(f"{model_path}.stl") bpy.ops.export_mesh.stl( filepath=f"{model_path}.stl", check_existing=True, use_selection=True ) - elif input.model_file_format =='VOX': + elif input.model_file_format == 'VOX': check_failed_exists(f"{model_path}.vox") bpy.ops.export_vox.some_data(filepath=f"{model_path}.vox") @@ -577,7 +578,12 @@ def render_and_save_nfts(input): batch_info = {"Batch Render Time": batch_complete_time, "Number of NFTs generated in Batch": x - 1, "Average time per generation": batch_complete_time / x - 1} - batch_info_folder = os.path.join(input.nft_batch_save_path, "Batch" + str(input.batch_to_generate), "batch_info.json") + batch_info_folder = os.path.join( + input.nft_batch_save_path, + "Batch" + str(input.batch_to_generate), + "batch_info.json" + ) + save_batch(batch_info, batch_info_folder) # Send Email that Batch is complete: diff --git a/main/helpers.py b/main/helpers.py index 15e9f25..c86128d 100644 --- a/main/helpers.py +++ b/main/helpers.py @@ -10,23 +10,6 @@ from shutil import 
get_terminal_size from collections import Counter, defaultdict -# ======== ENABLE DEBUG ======== # - -# This section is used for debugging, coding, or general testing purposes. - - -def enable_debug(enable_debug_bool): - if enable_debug_bool: - import logging - - logging.basicConfig( - filename="./log.txt", - level=logging.DEBUG, - format='[%(levelname)s][%(asctime)s]\n%(message)s\n', - datefmt='%Y-%m-%d %H:%M:%S' - ) - - # ======== CONSTANTS ======== # # Constants are used for storing or updating constant values that may need to be changes depending on system diff --git a/main/intermediate.py b/main/intermediate.py index ae0eece..0088748 100644 --- a/main/intermediate.py +++ b/main/intermediate.py @@ -29,7 +29,6 @@ def send_to_record(input, reverse_order=False): "IF": item_list1.split(','), rule_type: item_list2.split(',') } - print(rule_type) num += 1 else: input.logic_file = {} @@ -42,8 +41,6 @@ def send_to_record(input, reverse_order=False): "IF": item_list1.split(','), rule_type: item_list2.split(',') } - print(rule_type) - num += 1 dna_generator.send_to_record( @@ -58,6 +55,7 @@ def send_to_record(input, reverse_order=False): input.blend_my_nfts_output, input.batch_json_save_path, input.enable_debug, + input.log_path ) diff --git a/main/logic.py b/main/logic.py index 86dafc0..7f2b0de 100644 --- a/main/logic.py +++ b/main/logic.py @@ -261,7 +261,6 @@ def logicafy_dna_single(hierarchy, single_dna, logic_file, enable_rarity): did_reconstruct = False for rule in logic_file: # Items from 'IF' key for a given rule - print(logic_file) if_dict = create_dicts(hierarchy, logic_file[rule]["IF"], "IF") result_dict_type = "" From 7474847ec68bb66fd566225ed93f8dbbd7562951 Mon Sep 17 00:00:00 2001 From: Torrin Leonard <82110564+torrinworx@users.noreply.github.com> Date: Sat, 27 Aug 2022 19:41:41 -0400 Subject: [PATCH 11/12] Adding proper logging functionality - More general pep8 formatting - Renamed some more variables for pep8 - Modified import comments - Renamed functions 
- Added bpy.path.abspath() to material, logic, and debug save paths - Beginning to reformat files with new logging/console output system --- __init__.py | 155 +++++++++++++++++++++++++++++------------- main/dna_generator.py | 86 ++++++++++++----------- main/exporter.py | 2 + main/helpers.py | 2 + 4 files changed, 156 insertions(+), 89 deletions(-) diff --git a/__init__.py b/__init__.py index 5655103..e4ee8c0 100644 --- a/__init__.py +++ b/__init__.py @@ -17,15 +17,17 @@ LAST_UPDATED = "01:02PM, Aug 24th, 2022" # ======== Import handling ======== # +# Blender modules: import bpy from bpy.app.handlers import persistent -from bpy.props import (IntProperty, - BoolProperty, - CollectionProperty) +from bpy.props import (IntProperty, BoolProperty, CollectionProperty) + # Python modules: import os import sys import json +import logging +import tempfile import importlib import traceback from typing import Any @@ -35,7 +37,7 @@ from datetime import datetime, timezone # "a little hacky bs" - matt159 ;) sys.path.append(os.path.dirname(os.path.realpath(__file__))) -# Local file imports: +# Local modules: from main import \ helpers, \ dna_generator, \ @@ -51,6 +53,7 @@ from UILists import \ custom_metadata_ui_list, \ logic_ui_list +# Refresh Locals for development: if "bpy" in locals(): modules = { "helpers": helpers, @@ -79,7 +82,7 @@ dt = datetime.now(timezone.utc).astimezone() # Date Time in UTC local @persistent -def Refresh_UI(dummy1, dummy2): +def refresh_ui(dummy1, dummy2): """ Refreshes the UI upon user interacting with Blender (using depsgraph_update_post handler). Might be a better handler to use. 
@@ -106,7 +109,7 @@ def Refresh_UI(dummy1, dummy2): redraw_panel(refresh_panel_classes) -bpy.app.handlers.depsgraph_update_post.append(Refresh_UI) +bpy.app.handlers.depsgraph_update_post.append(refresh_ui) # ======== Defining BMNFTs Data ======== # @@ -164,6 +167,8 @@ class BMNFTData: enable_debug: bool log_path: str + enable_dry_run: str + custom_fields: dict = None fail_state: Any = False failed_batch: Any = None @@ -174,13 +179,14 @@ class BMNFTData: self.custom_fields = {} -def getBMNFTData(): +def get_bmnft_data(): _save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path) _Blend_My_NFTs_Output, _batch_json_save_path, _nftBatch_save_path = make_directories(_save_path) + # IMPORTANT: if a new directory variable is ever added, use 'bpy.path.abspath' instead of 'os.path.abspath'. data = BMNFTData( nft_name=bpy.context.scene.input_tool.nft_name, - save_path=_save_path, + save_path=bpy.path.abspath(_save_path), # Converting from Blender's relative path system to absolute. nfts_per_batch=bpy.context.scene.input_tool.nfts_per_batch, batch_to_generate=bpy.context.scene.input_tool.batch_to_generate, collection_size=bpy.context.scene.input_tool.collection_size, @@ -193,7 +199,7 @@ def getBMNFTData(): enable_logic=bpy.context.scene.input_tool.enable_logic, enable_logic_json=bpy.context.scene.input_tool.enable_logic_json, - logic_file=bpy.context.scene.input_tool.logic_file, + logic_file=bpy.path.abspath(bpy.context.scene.input_tool.logic_file), enable_images=bpy.context.scene.input_tool.image_bool, image_file_format=bpy.context.scene.input_tool.image_enum, @@ -229,7 +235,9 @@ def getBMNFTData(): receiver_to=bpy.context.scene.input_tool.receiver_to, enable_debug=bpy.context.scene.input_tool.enable_debug, - log_path=bpy.context.scene.input_tool.log_path, + log_path=bpy.path.abspath(bpy.context.scene.input_tool.log_path), + + enable_dry_run=bpy.context.scene.input_tool.enable_dry_run ) return data @@ -252,7 +260,7 @@ def make_directories(save_path): return 
Blend_My_NFTs_Output, batch_json_save_path, nftBatch_save_path -def runAsHeadless(): +def run_as_headless(): """ For use when running from the command line. """ @@ -313,26 +321,26 @@ def runAsHeadless(): settings.collection_size = int(pairs[1][1]) settings.nfts_per_batch = int(pairs[2][1]) settings.save_path = pairs[3][1] - settings.enable_rarity = pairs[4][1]=='True' - settings.enable_logic = pairs[5][1]=='True' + settings.enable_rarity = pairs[4][1] == 'True' + settings.enable_logic = pairs[5][1] == 'True' settings.enableLogicJson = pairs[6][1] == 'True' settings.logic_file = pairs[7][1] - settings.image_bool = pairs[8][1]=='True' + settings.image_bool = pairs[8][1] == 'True' settings.image_enum = pairs[9][1] - settings.animation_bool = pairs[10][1]=='True' + settings.animation_bool = pairs[10][1] == 'True' settings.animation_enum = pairs[11][1] - settings.model_bool = pairs[12][1]=='True' + settings.model_bool = pairs[12][1] == 'True' settings.model_enum = pairs[13][1] settings.batch_to_generate = int(pairs[14][1]) - settings.cardano_metadata_bool = pairs[15][1]=='True' + settings.cardano_metadata_bool = pairs[15][1] == 'True' settings.cardano_description = pairs[16][1] - settings.erc721_metadata = pairs[17][1]=='True' + settings.erc721_metadata = pairs[17][1] == 'True' settings.erc721_description = pairs[18][1] - settings.solana_metadata_bool = pairs[19][1]=='True' + settings.solana_metadata_bool = pairs[19][1] == 'True' settings.solanaDescription = pairs[20][1] - settings.enable_custom_fields = pairs[21][1]=='True' + settings.enable_custom_fields = pairs[21][1] == 'True' settings.custom_fields_file = pairs[22][1] - settings.enable_materials = pairs[23][1]=='True' + settings.enable_materials = pairs[23][1] == 'True' settings.materials_file = pairs[24][1] if args.save_path: @@ -341,7 +349,7 @@ def runAsHeadless(): if args.batch_number: settings.batch_to_generate = args.batch_number - input = getBMNFTData() + input = get_bmnft_data() if args.batch_data_path: 
input.batch_json_save_path = args.batch_data_path @@ -356,6 +364,42 @@ def runAsHeadless(): refactorer.reformat_nft_collection(input) +def activate_logging(): + """ + Used as an intermediate activated at runtime of the following operators: CreateData, ExportNFTs, ResumeFailedBatch, + RefactorBatches, and ExportSettings. Must be independent of 'input' class to be safe, gets variables directly from + bpy. + """ + + log_path = bpy.context.scene.input_tool.log_path + if log_path: + file_handler = logging.FileHandler(os.path.join(log_path, 'BMNFTs_Log.txt'), 'a') + else: + file_handler = logging.FileHandler(os.path.join(tempfile.gettempdir(), 'BMNFTs_Log.txt'), 'a') + + formatter = logging.Formatter( + '[%(asctime)s] [%(levelname)s] [%(filename)s > %(funcName)s() > Line:%(lineno)d]\n%(message)s\n' + ) + file_handler.setFormatter(formatter) + + log = logging.getLogger() + for handler in log.handlers[:]: + if isinstance(handler, logging.FileHandler): + log.removeHandler(handler) + if isinstance(handler, logging.StreamHandler): + log.removeHandler(handler) + log.addHandler(file_handler) + + # Record log to console: + console_handler = logging.StreamHandler(sys.stdout) + log.addHandler(console_handler) + + if bpy.context.scene.input_tool.enable_debug: + logging.getLogger().setLevel(logging.DEBUG) + else: + logging.getLogger().setLevel(logging.INFO) + + # ======== User input Property Group ======== # class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): # Create NFT Data Panel: @@ -498,8 +542,6 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): subtype="FILE_PATH" ) - # TODO: Add 'Other' panel inputs to Headless functionality. 
- # Other Panel: enable_auto_save: bpy.props.BoolProperty( name="Auto Save Before Generation", @@ -541,8 +583,8 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): enable_debug: bpy.props.BoolProperty( name="Enable Debug Mode", - description="Allows you to run Blend_My_NFTs without generating any content files and includes more " - "console information." + description="Allows you to run Blend_My_NFTs with debugging console messages saved to a BMNFTs_Log.txt " + "file." ) log_path: bpy.props.StringProperty( name="Debug Log Path", @@ -552,6 +594,11 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): subtype="FILE_PATH" ) + enable_dry_run: bpy.props.BoolProperty( + name="Enable Dry Run", + description="Allows you to run Blend_My_NFTs without generating any content files." + ) + # API Panel properties: api_key: bpy.props.StringProperty( name="API Key", @@ -560,7 +607,7 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): # ======== Main Operators ======== # -class Createdata(bpy.types.Operator): +class CreateData(bpy.types.Operator): bl_idname = 'create.data' bl_label = 'Create Data' bl_description = 'Creates NFT Data. Run after any changes were made to scene. 
All previous data will be ' \ @@ -572,8 +619,10 @@ class Createdata(bpy.types.Operator): name="Reverse Order") def execute(self, context): + activate_logging() + # Handling Custom Fields UIList input: - input = getBMNFTData() + input = get_bmnft_data() if input.enable_logic: if input.enable_logic_json and not input.logic_file: @@ -600,8 +649,9 @@ class ExportNFTs(bpy.types.Operator): name="Reverse Order") def execute(self, context): - input = getBMNFTData() - # Handling Custom Fields UIList input: + activate_logging() + + input = get_bmnft_data() intermediate.render_and_save_nfts(input) @@ -617,17 +667,19 @@ class ResumeFailedBatch(bpy.types.Operator): bl_options = {"REGISTER", "UNDO"} def execute(self, context): + activate_logging() + _save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path) _Blend_My_NFTs_Output, _batch_json_save_path, _nftBatch_save_path = make_directories(_save_path) _batchToGenerate = bpy.context.scene.input_tool.batch_to_generate file_name = os.path.join(_batch_json_save_path, "Batch{}.json".format(_batchToGenerate)) - batchData = json.load(open(file_name)) + batch_data = json.load(open(file_name)) _fail_state, _failed_batch, _failed_dna, _failed_dna_index = helpers.check_failed_batches(_batch_json_save_path) - render_settings = batchData["Generation Save"][-1]["Render_Settings"] + render_settings = batch_data["Generation Save"][-1]["Render_Settings"] input = BMNFTData( nft_name=render_settings["nft_name"], @@ -682,6 +734,8 @@ class ResumeFailedBatch(bpy.types.Operator): enable_debug=render_settings["enable_debug"], log_path=render_settings["log_path"], + enable_dry_run=render_settings["enable_dry_run"], + fail_state=_fail_state, failed_batch=_failed_batch, failed_dna=_failed_dna, @@ -709,8 +763,9 @@ class RefactorBatches(bpy.types.Operator): name="Reverse Order") def execute(self, context): - # Passing info to main functions for refactoring: - refactorer.reformat_nft_collection(getBMNFTData()) + activate_logging() + + 
refactorer.reformat_nft_collection(get_bmnft_data()) return {"FINISHED"} def invoke(self, context, event): @@ -725,6 +780,8 @@ class ExportSettings(bpy.types.Operator): bl_options = {"REGISTER", "UNDO"} def execute(self, context): + activate_logging() + save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path) filename = "config.cfg" @@ -1082,8 +1139,10 @@ class BMNFTS_PT_Other(bpy.types.Panel): row = layout.row() row.prop(input_tool_scene, "enable_debug") if bpy.context.scene.input_tool.enable_debug: + row = layout.row() row.prop(input_tool_scene, "log_path") - + row = layout.row() + row.prop(input_tool_scene, "enable_dry_run") row = layout.row() row = layout.row() @@ -1111,20 +1170,20 @@ class BMNFTS_PT_Other(bpy.types.Panel): # ======== Blender add-on register/unregister handling ======== # classes = ( # Property Group Classes: - BMNFTS_PGT_Input_Properties, + BMNFTS_PGT_Input_Properties, - # Operator Classes: - Createdata, - ExportNFTs, - ResumeFailedBatch, - RefactorBatches, - ExportSettings, + # Operator Classes: + CreateData, + ExportNFTs, + ResumeFailedBatch, + RefactorBatches, + ExportSettings, - # Panel Classes: - BMNFTS_PT_CreateData, - BMNFTS_PT_GenerateNFTs, - BMNFTS_PT_Refactor, - BMNFTS_PT_Other, + # Panel Classes: + BMNFTS_PT_CreateData, + BMNFTS_PT_GenerateNFTs, + BMNFTS_PT_Refactor, + BMNFTS_PT_Other, ) + custom_metadata_ui_list.classes_Custom_Metadata_UIList + logic_ui_list.classes_Logic_UIList @@ -1157,4 +1216,4 @@ def unregister(): if __name__ == '__main__': register() - runAsHeadless() + run_as_headless() diff --git a/main/dna_generator.py b/main/dna_generator.py index 1b5314b..cbe9779 100644 --- a/main/dna_generator.py +++ b/main/dna_generator.py @@ -12,11 +12,7 @@ from functools import partial from . 
import logic, material_generator, helpers from .helpers import TextColors -logging.basicConfig( - level=logging.INFO, - format='[%(levelname)s][%(asctime)s]\n%(message)s\n', - datefmt='%Y-%m-%d %H:%M:%S' -) +log = logging.getLogger(__name__) def generate_nft_dna( @@ -110,25 +106,33 @@ def generate_nft_dna( single_dna = "" if not enable_rarity: single_dna = create_dna_random(hierarchy) - logging.debug(f"============\nOriginal DNA: {single_dna}") - print("============") - print(f"Original DNA: {single_dna}") + log.debug( + f"\n================" + f"\nOriginal DNA: {single_dna}" + ) if enable_rarity: single_dna = create_dna_rarity(hierarchy) - logging.debug(f"Rarity DNA: {single_dna}") - print(f"Rarity DNA: {single_dna}") + log.debug( + f"\n================" + f"\nRarity DNA: {single_dna}" + ) if enable_logic: single_dna = logic.logicafy_dna_single(hierarchy, single_dna, logic_file, enable_rarity) - logging.debug(f"Logic DNA: {single_dna}") - print(f"Logic DNA: {single_dna}") + log.debug( + f"\n================" + f"\nLogic DNA: {single_dna}" + ) if enable_materials: single_dna = material_generator.apply_materials(hierarchy, single_dna, materials_file, enable_rarity) - logging.debug(f"Materials DNA: {single_dna}\n============\n") - print(f"Materials DNA: {single_dna}") - print("============\n") + log.debug( + f"\n================" + f"\nMaterials DNA: {single_dna}" + f"\n================\n" + + ) return single_dna @@ -179,9 +183,9 @@ def make_batches( batch_json_save_path ): """ - Sorts through all the batches and outputs a given number of batches depending on collection_size and nfts_per_batch. - These files are then saved as Batch#.json files to batch_json_save_path - """ + Sorts through all the batches and outputs a given number of batches depending on collection_size and nfts_per_batch. 
+ These files are then saved as Batch#.json files to batch_json_save_path + """ # Clears the Batch Data folder of Batches: batch_list = os.listdir(batch_json_save_path) @@ -197,7 +201,6 @@ def make_batches( nft_record_save_path = os.path.join(blend_my_nf_ts_output, "NFTRecord.json") data_dictionary = json.load(open(nft_record_save_path)) - num_nfts_generated = data_dictionary["num_nfts_generated"] hierarchy = data_dictionary["hierarchy"] dna_list = data_dictionary["dna_list"] @@ -206,8 +209,10 @@ def make_batches( if remainder_dna > 0: num_batches += 1 - print(f"To generate batches of {nfts_per_batch} DNA sequences per batch, with a total of {num_nfts_generated}" - f" possible NFT DNA sequences, the number of batches generated will be {num_batches}") + log.info( + f"\nGenerating {num_batches} batch files. If the last batch isn't filled all the way the program will " + f"operate normally." + ) batches_dna_list = [] @@ -253,38 +258,37 @@ def send_to_record( repeat DNA. """ - if enable_debug: - logging.basicConfig( - filename=os.path.join(log_path, "BMNFTS_Log.txt"), - level=logging.DEBUG - ) - # Checking Scene is compatible with BMNFTs: helpers.check_scene() # Messages: - print( - f"\n{TextColors.OK}======== Creating NFT Data ========{TextColors.RESET}" - f"\nGenerating {collection_size} NFT DNA" + log.info( + f"\n{TextColors.OK}======== Creating NFT Data ({collection_size} DNA) ========{TextColors.RESET}" ) if not enable_rarity and not enable_logic: - print( - f"{TextColors.OK}NFT DNA will be determined randomly, no special properties or parameters are " - f"applied.\n{TextColors.RESET}") + log.info( + f"\n - NFT DNA will be determined randomly, no special properties or parameters are " + f"applied." + ) if enable_rarity: - print( - f"{TextColors.OK}Rarity is ON. Weights listed in .blend scene will be taken into account." - f"{TextColors.RESET}" + log.info( + f"\n - Rarity is ON. Weights listed in .blend scene will be taken into account." 
+ f"" ) if enable_logic: - print( - f"{TextColors.OK}Logic is ON. {len(list(logic_file.keys()))} rules detected and applied." - f"{TextColors.RESET}" + log.info( + f"\n - Logic is ON. {len(list(logic_file.keys()))} rules detected, implementation will " + f"be attempted." ) + if enable_materials: + log.info( + f"\n - Materials are ON. {len(list(json.load(open(materials_file)).keys()))} materials " + f"instances detected, implementation will be attempted." + ) time_start = time.time() def create_nft_data(): @@ -339,7 +343,7 @@ def send_to_record( ) # Loading Animation: - loading = helpers.Loader(f'Creating NFT DNA...', '').start() + loading = helpers.Loader(f'\nCreating NFT DNA...', '').start() create_nft_data() make_batches(collection_size, nfts_per_batch, save_path, batch_json_save_path) loading.stop() @@ -347,5 +351,5 @@ def send_to_record( time_end = time.time() print( - f"{TextColors.OK}Created and saved NFT DNA in {time_end - time_start}s.\n{TextColors.RESET}" + f"\n{TextColors.OK}Created and saved NFT DNA in {time_end - time_start}s.\n{TextColors.RESET}" ) diff --git a/main/exporter.py b/main/exporter.py index e51e3b3..1bfc2dc 100644 --- a/main/exporter.py +++ b/main/exporter.py @@ -98,6 +98,8 @@ def save_generation_state(input): "enable_debug": input.enable_debug, "log_path": input.log_path, + "enable_dry_run": input.enable_dry_run, + "custom_fields": input.custom_fields, }, }) diff --git a/main/helpers.py b/main/helpers.py index c86128d..d35f220 100644 --- a/main/helpers.py +++ b/main/helpers.py @@ -36,6 +36,8 @@ def remove_file_by_extension(dirlist): return return_dirs +# TODO: fix colours in console logs and find a way to include coloured text in .txt file. + class TextColors: """ The colour of console messages. 
From 0931d9921414626ee0b97c0549c648dac203928d Mon Sep 17 00:00:00 2001 From: Torrin Leonard <82110564+torrinworx@users.noreply.github.com> Date: Sat, 27 Aug 2022 21:48:44 -0400 Subject: [PATCH 12/12] Refactoring print statements and exceptions - Reformatted exceptions to work with new logging system - Reformatted all print statements - Moved `activate_debug()` to helpers.py - Debug mode now shuts down all renders and 3D model exports to perform a 'Dry Run' to check that collections/objects/DNA still exist and work on render time. --- __init__.py | 54 ++------- main/dna_generator.py | 40 ++++--- main/exporter.py | 237 ++++++++++++++++++++----------------- main/helpers.py | 141 ++++++++++++++++------ main/intermediate.py | 26 ++-- main/logic.py | 30 +++-- main/material_generator.py | 28 +++-- main/refactorer.py | 5 +- 8 files changed, 322 insertions(+), 239 deletions(-) diff --git a/__init__.py b/__init__.py index e4ee8c0..f1c4b51 100644 --- a/__init__.py +++ b/__init__.py @@ -26,8 +26,6 @@ from bpy.props import (IntProperty, BoolProperty, CollectionProperty) import os import sys import json -import logging -import tempfile import importlib import traceback from typing import Any @@ -364,42 +362,6 @@ def run_as_headless(): refactorer.reformat_nft_collection(input) -def activate_logging(): - """ - Used as an intermediate activated at runtime of the following operators: CreateData, ExportNFTs, ResumeFailedBatch, - RefactorBatches, and ExportSettings. Must be independent of 'input' class to be safe, gets variables directly from - bpy. 
- """ - - log_path = bpy.context.scene.input_tool.log_path - if log_path: - file_handler = logging.FileHandler(os.path.join(log_path, 'BMNFTs_Log.txt'), 'a') - else: - file_handler = logging.FileHandler(os.path.join(tempfile.gettempdir(), 'BMNFTs_Log.txt'), 'a') - - formatter = logging.Formatter( - '[%(asctime)s] [%(levelname)s] [%(filename)s > %(funcName)s() > Line:%(lineno)d]\n%(message)s\n' - ) - file_handler.setFormatter(formatter) - - log = logging.getLogger() - for handler in log.handlers[:]: - if isinstance(handler, logging.FileHandler): - log.removeHandler(handler) - if isinstance(handler, logging.StreamHandler): - log.removeHandler(handler) - log.addHandler(file_handler) - - # Record log to console: - console_handler = logging.StreamHandler(sys.stdout) - log.addHandler(console_handler) - - if bpy.context.scene.input_tool.enable_debug: - logging.getLogger().setLevel(logging.DEBUG) - else: - logging.getLogger().setLevel(logging.INFO) - - # ======== User input Property Group ======== # class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): # Create NFT Data Panel: @@ -583,8 +545,8 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): enable_debug: bpy.props.BoolProperty( name="Enable Debug Mode", - description="Allows you to run Blend_My_NFTs with debugging console messages saved to a BMNFTs_Log.txt " - "file." + description="Allows you to run Blend_My_NFTs without generating any content files and enables debugging " + "console messages saved to a BMNFTs_Log.txt file." 
) log_path: bpy.props.StringProperty( name="Debug Log Path", @@ -619,7 +581,7 @@ class CreateData(bpy.types.Operator): name="Reverse Order") def execute(self, context): - activate_logging() + helpers.activate_logging() # Handling Custom Fields UIList input: input = get_bmnft_data() @@ -649,7 +611,7 @@ class ExportNFTs(bpy.types.Operator): name="Reverse Order") def execute(self, context): - activate_logging() + helpers.activate_logging() input = get_bmnft_data() @@ -667,7 +629,7 @@ class ResumeFailedBatch(bpy.types.Operator): bl_options = {"REGISTER", "UNDO"} def execute(self, context): - activate_logging() + helpers.activate_logging() _save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path) _Blend_My_NFTs_Output, _batch_json_save_path, _nftBatch_save_path = make_directories(_save_path) @@ -763,7 +725,7 @@ class RefactorBatches(bpy.types.Operator): name="Reverse Order") def execute(self, context): - activate_logging() + helpers.activate_logging() refactorer.reformat_nft_collection(get_bmnft_data()) return {"FINISHED"} @@ -780,7 +742,7 @@ class ExportSettings(bpy.types.Operator): bl_options = {"REGISTER", "UNDO"} def execute(self, context): - activate_logging() + helpers.activate_logging() save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path) filename = "config.cfg" @@ -1141,8 +1103,6 @@ class BMNFTS_PT_Other(bpy.types.Panel): if bpy.context.scene.input_tool.enable_debug: row = layout.row() row.prop(input_tool_scene, "log_path") - row = layout.row() - row.prop(input_tool_scene, "enable_dry_run") row = layout.row() row = layout.row() diff --git a/main/dna_generator.py b/main/dna_generator.py index cbe9779..f54a2ae 100644 --- a/main/dna_generator.py +++ b/main/dna_generator.py @@ -313,12 +313,15 @@ def send_to_record( os.path.join(save_path, "Blend_My_NFTs Output/NFT_Data")) except FileNotFoundError: - raise FileNotFoundError( - f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" - f"Data not saved to NFTRecord.json. 
Please review your Blender scene and ensure it follows " - f"the naming conventions and scene structure. For more information, see:\n{TextColors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + log.error( + f"\n{traceback.format_exc()}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"Data not saved to NFTRecord.json, file not found. Check that your save path, logic file path, or " + f"materials file path is correct. For more information, see:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) + raise + finally: loading.stop() @@ -327,20 +330,21 @@ def send_to_record( with open(nft_record_save_path, 'w') as outfile: outfile.write(ledger + '\n') - print( - f"\n{TextColors.OK}Blend_My_NFTs Success:\n" - f"{len(data_dictionary['dna_list'])} NFT DNA saved to {nft_record_save_path}. NFT DNA Successfully " - f"created.\n{TextColors.RESET}") + log.info( + f"\n{TextColors.OK}{len(data_dictionary['dna_list'])} NFT data successfully saved to:" + f"\n{nft_record_save_path}{TextColors.RESET}" + ) except Exception: - traceback.print_exc() - raise ( - f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" - f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows " - f"the naming conventions and scene structure. For more information, " - f"see:\n{TextColors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + log.error( + f"\n{traceback.format_exc()}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows " + f"the naming conventions and scene structure. 
For more information, " + f"see:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) + raise # Loading Animation: loading = helpers.Loader(f'\nCreating NFT DNA...', '').start() @@ -350,6 +354,6 @@ def send_to_record( time_end = time.time() - print( - f"\n{TextColors.OK}Created and saved NFT DNA in {time_end - time_start}s.\n{TextColors.RESET}" + log.info( + f"\n{TextColors.OK}TIME [Created and Saved NFT data]: {time_end - time_start}s.\n{TextColors.RESET}" ) diff --git a/main/exporter.py b/main/exporter.py index 1bfc2dc..377d57f 100644 --- a/main/exporter.py +++ b/main/exporter.py @@ -8,12 +8,16 @@ import ssl import time import json import smtplib +import logging import datetime import platform +import traceback from .helpers import TextColors, Loader from .metadata_templates import create_cardano_metadata, createSolanaMetaData, create_erc721_meta_data +log = logging.getLogger(__name__) + # Save info def save_batch(batch, file_name): @@ -145,7 +149,9 @@ def render_and_save_nfts(input): # If failed Batch is detected and user is resuming its generation: if input.fail_state: - print(f"{TextColors.ERROR}\nResuming Batch #{input.failed_batch}\n{TextColors.RESET}") + log.info( + f"{TextColors.OK}\nResuming Batch #{input.failed_batch}{TextColors.RESET}" + ) nfts_in_batch, hierarchy, batch_dna_list = get_batch_data(input.failed_batch, input.batch_json_save_path) for a in range(input.failed_dna): del batch_dna_list[0] @@ -153,7 +159,9 @@ def render_and_save_nfts(input): # If user is generating the normal way: else: - print(f"\nGenerating Batch #{input.batch_to_generate}\n") + log.info( + f"{TextColors.OK}\n======== Generating Batch #{input.batch_to_generate} ========{TextColors.RESET}" + ) nfts_in_batch, hierarchy, batch_dna_list = get_batch_data(input.batch_to_generate, input.batch_json_save_path) save_generation_state(input) x = 1 @@ -259,13 +267,15 @@ def render_and_save_nfts(input): 
bpy.data.collections[j].hide_render = True bpy.data.collections[j].hide_viewport = True except KeyError: - raise TypeError( + log.error( + f"\n{traceback.format_exc()}" f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" f"The Collection '{j}' appears to be missing or has been renamed. If you made any changes " f"to your .blend file scene, ensure you re-create your NFT Data so Blend_My_NFTs can read " f"your scene. For more information see:{TextColors.RESET}" f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) + raise TypeError() dna_dictionary = match_dna_to_variant(single_dna) name = input.nft_name + "_" + str(order_num) @@ -275,14 +285,18 @@ def render_and_save_nfts(input): # ob = bpy.data.objects['Text'] # Object name # ob.data.body = str(f"DNA: {full_single_dna}") # Set text of Text Object ob - print(f"\n{TextColors.OK}======== Generating NFT {x}/{nfts_in_batch}: {name} ========{TextColors.RESET}") - print(f"\nVariants selected:") - print(f"{dna_dictionary}") + log.info( + f"\n{TextColors.OK}======== Generating NFT {x}/{nfts_in_batch}: {name} ========{TextColors.RESET}" + f"\nVariants selected:" + f"\n{dna_dictionary}" + ) if input.enable_materials: - print(f"\nMaterials selected:") - print(f"{material_dna_dictionary}") + log.info( + f"\nMaterials selected:" + f"\n{material_dna_dictionary}" + ) - print(f"\nDNA Code:{full_single_dna}") + log.info(f"\nDNA Code:{full_single_dna}") for c in dna_dictionary: collection = dna_dictionary[c] @@ -320,7 +334,7 @@ def render_and_save_nfts(input): # Generation/Rendering: if input.enable_images: - print(f"\n{TextColors.OK}-------- Image --------{TextColors.RESET}") + log.info(f"\n{TextColors.OK}-------- Image --------{TextColors.RESET}") image_render_time_start = time.time() @@ -332,7 +346,9 @@ def render_and_save_nfts(input): bpy.context.scene.render.filepath = image_path bpy.context.scene.render.image_settings.file_format = input.image_file_format - 
bpy.ops.render.render(write_still=True) + + if not input.enable_debug: + bpy.ops.render.render(write_still=True) # Loading Animation: loading = Loader(f'Rendering Image {x}/{nfts_in_batch}...', '').start() @@ -341,13 +357,13 @@ def render_and_save_nfts(input): image_render_time_end = time.time() - print( - f"{TextColors.OK}Rendered image in {image_render_time_end - image_render_time_start}s." + log.info( + f"{TextColors.OK}TIME [Rendered Image]: {image_render_time_end - image_render_time_start}s." f"\n{TextColors.RESET}" ) if input.enable_animations: - print(f"\n{TextColors.OK}-------- Animation --------{TextColors.RESET}") + log.info(f"\n{TextColors.OK}-------- Animation --------{TextColors.RESET}") animation_render_time_start = time.time() @@ -357,34 +373,35 @@ def render_and_save_nfts(input): if not os.path.exists(animation_folder): os.makedirs(animation_folder) - if input.animation_file_format == 'MP4': - bpy.context.scene.render.filepath = animation_path - bpy.context.scene.render.image_settings.file_format = "FFMPEG" + if not input.enable_debug: + if input.animation_file_format == 'MP4': + bpy.context.scene.render.filepath = animation_path + bpy.context.scene.render.image_settings.file_format = "FFMPEG" - bpy.context.scene.render.ffmpeg.format = 'MPEG4' - bpy.context.scene.render.ffmpeg.codec = 'H264' - bpy.ops.render.render(animation=True) + bpy.context.scene.render.ffmpeg.format = 'MPEG4' + bpy.context.scene.render.ffmpeg.codec = 'H264' + bpy.ops.render.render(animation=True) - elif input.animation_file_format == 'PNG': - if not os.path.exists(animation_path): - os.makedirs(animation_path) + elif input.animation_file_format == 'PNG': + if not os.path.exists(animation_path): + os.makedirs(animation_path) - bpy.context.scene.render.filepath = os.path.join(animation_path, name) - bpy.context.scene.render.image_settings.file_format = input.animation_file_format - bpy.ops.render.render(animation=True) + bpy.context.scene.render.filepath = 
os.path.join(animation_path, name) + bpy.context.scene.render.image_settings.file_format = input.animation_file_format + bpy.ops.render.render(animation=True) - elif input.animation_file_format == 'TIFF': - if not os.path.exists(animation_path): - os.makedirs(animation_path) + elif input.animation_file_format == 'TIFF': + if not os.path.exists(animation_path): + os.makedirs(animation_path) - bpy.context.scene.render.filepath = os.path.join(animation_path, name) - bpy.context.scene.render.image_settings.file_format = input.animation_file_format - bpy.ops.render.render(animation=True) + bpy.context.scene.render.filepath = os.path.join(animation_path, name) + bpy.context.scene.render.image_settings.file_format = input.animation_file_format + bpy.ops.render.render(animation=True) - else: - bpy.context.scene.render.filepath = animation_path - bpy.context.scene.render.image_settings.file_format = input.animation_file_format - bpy.ops.render.render(animation=True) + else: + bpy.context.scene.render.filepath = animation_path + bpy.context.scene.render.image_settings.file_format = input.animation_file_format + bpy.ops.render.render(animation=True) # Loading Animation: loading = Loader(f'Rendering Animation {x}/{nfts_in_batch}...', '').start() @@ -393,13 +410,13 @@ def render_and_save_nfts(input): animation_render_time_end = time.time() - print( - f"{TextColors.OK}Rendered animation in {animation_render_time_end - animation_render_time_start}s." 
- f"\n{TextColors.RESET}" + log.info( + f"\n{TextColors.OK}TIME [Rendered Animation]: " + f"{animation_render_time_end - animation_render_time_start}s.{TextColors.RESET}" ) if input.enable_models: - print(f"\n{TextColors.OK}-------- 3D Model --------{TextColors.RESET}") + log.info(f"\n{TextColors.OK}-------- 3D Model --------{TextColors.RESET}") model_generation_time_start = time.time() @@ -424,65 +441,66 @@ def render_and_save_nfts(input): # if obj.name in remove_objects: # obj.select_set(False) - if input.model_file_format == 'GLB': - check_failed_exists(f"{model_path}.glb") - bpy.ops.export_scene.gltf( - filepath=f"{model_path}.glb", - check_existing=True, - export_format='GLB', - export_keep_originals=True, - use_selection=True - ) - if input.model_file_format == 'GLTF_SEPARATE': - check_failed_exists(f"{model_path}.gltf") - check_failed_exists(f"{model_path}.bin") - bpy.ops.export_scene.gltf( - filepath=f"{model_path}", - check_existing=True, - export_format='GLTF_SEPARATE', - export_keep_originals=True, - use_selection=True - ) - if input.model_file_format == 'GLTF_EMBEDDED': - check_failed_exists(f"{model_path}.gltf") - bpy.ops.export_scene.gltf( - filepath=f"{model_path}.gltf", - check_existing=True, - export_format='GLTF_EMBEDDED', - export_keep_originals=True, - use_selection=True - ) - elif input.model_file_format == 'FBX': - check_failed_exists(f"{model_path}.fbx") - bpy.ops.export_scene.fbx( - filepath=f"{model_path}.fbx", - check_existing=True, - use_selection=True - ) - elif input.model_file_format == 'OBJ': - check_failed_exists(f"{model_path}.obj") - bpy.ops.export_scene.obj( - filepath=f"{model_path}.obj", - check_existing=True, - use_selection=True, - ) - elif input.model_file_format == 'X3D': - check_failed_exists(f"{model_path}.x3d") - bpy.ops.export_scene.x3d( - filepath=f"{model_path}.x3d", - check_existing=True, - use_selection=True - ) - elif input.model_file_format == 'STL': - check_failed_exists(f"{model_path}.stl") - 
bpy.ops.export_mesh.stl( - filepath=f"{model_path}.stl", - check_existing=True, - use_selection=True - ) - elif input.model_file_format == 'VOX': - check_failed_exists(f"{model_path}.vox") - bpy.ops.export_vox.some_data(filepath=f"{model_path}.vox") + if not input.enable_debug: + if input.model_file_format == 'GLB': + check_failed_exists(f"{model_path}.glb") + bpy.ops.export_scene.gltf( + filepath=f"{model_path}.glb", + check_existing=True, + export_format='GLB', + export_keep_originals=True, + use_selection=True + ) + if input.model_file_format == 'GLTF_SEPARATE': + check_failed_exists(f"{model_path}.gltf") + check_failed_exists(f"{model_path}.bin") + bpy.ops.export_scene.gltf( + filepath=f"{model_path}", + check_existing=True, + export_format='GLTF_SEPARATE', + export_keep_originals=True, + use_selection=True + ) + if input.model_file_format == 'GLTF_EMBEDDED': + check_failed_exists(f"{model_path}.gltf") + bpy.ops.export_scene.gltf( + filepath=f"{model_path}.gltf", + check_existing=True, + export_format='GLTF_EMBEDDED', + export_keep_originals=True, + use_selection=True + ) + elif input.model_file_format == 'FBX': + check_failed_exists(f"{model_path}.fbx") + bpy.ops.export_scene.fbx( + filepath=f"{model_path}.fbx", + check_existing=True, + use_selection=True + ) + elif input.model_file_format == 'OBJ': + check_failed_exists(f"{model_path}.obj") + bpy.ops.export_scene.obj( + filepath=f"{model_path}.obj", + check_existing=True, + use_selection=True, + ) + elif input.model_file_format == 'X3D': + check_failed_exists(f"{model_path}.x3d") + bpy.ops.export_scene.x3d( + filepath=f"{model_path}.x3d", + check_existing=True, + use_selection=True + ) + elif input.model_file_format == 'STL': + check_failed_exists(f"{model_path}.stl") + bpy.ops.export_mesh.stl( + filepath=f"{model_path}.stl", + check_existing=True, + use_selection=True + ) + elif input.model_file_format == 'VOX': + check_failed_exists(f"{model_path}.vox") + 
bpy.ops.export_vox.some_data(filepath=f"{model_path}.vox") # Loading Animation: loading = Loader(f'Generating 3D model {x}/{nfts_in_batch}...', '').start() @@ -491,9 +509,9 @@ def render_and_save_nfts(input): model_generation_time_end = time.time() - print( - f"{TextColors.OK}Generated 3D model in {model_generation_time_end - model_generation_time_start}s." - f"\n{TextColors.RESET}" + log.info( + f"\n{TextColors.OK}TIME [Generated 3D Model]: " + f"{model_generation_time_end - model_generation_time_start}s.{TextColors.RESET}" ) # Generating Metadata: @@ -561,7 +579,7 @@ def render_and_save_nfts(input): with open(os.path.join(bmnft_data_folder, "Data_" + name + ".json"), 'w') as outfile: outfile.write(json_meta_data + '\n') - print(f"Completed {name} render in {time.time() - time_start_2}s") + log.info(f"{TextColors.OK}\nTIME [NFT {name} Generated]: {time.time() - time_start_2}s") save_completed(full_single_dna, a, x, input.batch_json_save_path, input.batch_to_generate) @@ -574,8 +592,11 @@ def render_and_save_nfts(input): batch_complete_time = time.time() - time_start_1 - print(f"\nAll NFTs successfully generated and sent to {input.nft_batch_save_path}" - f"\nCompleted all renders in Batch{input.batch_to_generate}.json in {batch_complete_time}s\n") + log.info( + f"\nAll NFTs in Batch {input.batch_to_generate} successfully generated and saved at:" + f"\n{input.nft_batch_save_path}" + f"\nTIME [Batch {input.batch_to_generate} Generated]: {batch_complete_time}s\n" + ) batch_info = {"Batch Render Time": batch_complete_time, "Number of NFTs generated in Batch": x - 1, "Average time per generation": batch_complete_time / x - 1} @@ -630,11 +651,9 @@ def render_and_save_nfts(input): # Automatic Shutdown: # If user selects automatic shutdown but did not specify time after Batch completion def shutdown(time): - plateform = platform.system() - - if plateform == "Windows": + if platform.system() == "Windows": os.system(f"shutdown /s /t {time}") - if plateform == "Darwin": + if 
platform.system() == "Darwin": os.system(f"shutdown /s /t {time}") if input.enable_auto_shutdown and not input.specify_time_bool: diff --git a/main/helpers.py b/main/helpers.py index d35f220..d0839cb 100644 --- a/main/helpers.py +++ b/main/helpers.py @@ -1,14 +1,20 @@ import bpy import os +import sys import json import copy +import logging +import tempfile import platform +import traceback from time import sleep from itertools import cycle from threading import Thread from shutil import get_terminal_size from collections import Counter, defaultdict +log = logging.getLogger(__name__) + # ======== CONSTANTS ======== # @@ -139,24 +145,28 @@ def get_hierarchy(): for i in att_vars: # Check if name follows naming conventions: if int(i.count("_")) > 2 and int(i.split("_")[1]) > 0: - raise Exception( + log.error( + f"\n{traceback.format_exc()}" f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" f"There is a naming issue with the following Attribute/Variant: '{i}'\n" f"Review the naming convention of Attribute and Variant collections here:\n{TextColors.RESET}" f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) + raise Exception() try: number = i.split("_")[1] name = i.split("_")[0] rarity = i.split("_")[2] except IndexError: - raise Exception( + log.error( + f"\n{traceback.format_exc()}" f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" f"There is a naming issue with the following Attribute/Variant: '{i}'\n" f"Review the naming convention of Attribute and Variant collections here:\n{TextColors.RESET}" f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) + raise Exception() all_att_data_list[i] = {"name": name, "number": number, "rarity": rarity} return all_att_data_list @@ -174,7 +184,7 @@ def get_hierarchy(): for a in hierarchy: for b in hierarchy[a]: for x in variant_meta_data: - if str(x)==str(b): + if str(x) == str(b): (hierarchy[a])[b] = variant_meta_data[x] return hierarchy @@ -195,10 +205,10 @@ def 
get_combinations(): for i in hierarchy: # Ignore Collections with nothing in them - if len(hierarchy[i])!=0: + if len(hierarchy[i]) != 0: hierarchy_by_num.append(len(hierarchy[i])) else: - print(f"The following collection has been identified as empty: {i}") + log.warning(f"\nThe following collection has been identified as empty: {i}") combinations = 1 for i in hierarchy_by_num: @@ -232,13 +242,14 @@ def check_scene(): # Not complete scriptIgnoreCollection = bpy.data.collections["Script_Ignore"] script_ignore_exists = True except KeyError: - raise TypeError( + log.error( + f"\n{traceback.format_exc()}" f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" f"Add a Script_Ignore collection to your Blender scene and ensure the name is exactly 'Script_Ignore'. " - f"For more information, " - f"see:" + f"For more information, see:" f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{TextColors.RESET}" ) + raise hierarchy = get_hierarchy() collections = bpy.context.scene.collection @@ -288,23 +299,27 @@ def check_rarity(hierarchy, dna_list_formatted, save_path): complete_data[i] = x - print( - f"\n{TextColors.OK}\n" - f"Rarity Checker is active. These are the percentages for each variant per attribute you set in your .blend" - f" file: \n{TextColors.RESET}" - ) - + # Saving Rarity data to console and log: + x = f"\nPercentages for each Variant per Attribute:" for i in complete_data: - print(i + ":") - for j in complete_data[i]: - print(" " + j + ": " + complete_data[i][j][0] + " Occurrences: " + complete_data[i][j][1]) + x += f"\n\n{i}:" + if complete_data[i]: + for j in complete_data[i]: + x += f"\n - {j}: {complete_data[i][j][0]} occurs {complete_data[i][j][1]} times." + else: + x += f"\n - Variants not selected." 
+ + log.info(x) json_meta_data = json.dumps(complete_data, indent=1, ensure_ascii=True) with open(os.path.join(save_path, "RarityData.json"), 'w') as outfile: outfile.write(json_meta_data + '\n') path = os.path.join(save_path, "RarityData.json") - print(TextColors.OK + f"Rarity Data has been saved to {path}." + TextColors.RESET) + + log.info( + f"\nRarity data has been saved to:\n{path}" + ) def check_duplicates(dna_list_formatted): @@ -318,11 +333,18 @@ def check_duplicates(dna_list_formatted): for x in dna_list: if x in seen: - print(x) duplicates += 1 seen.add(x) - print(f"\nNFTRecord.json contains {duplicates} duplicate NFT DNA.") + if duplicates > 0: + log.warning( + f"\n{TextColors.WARNING}Blend_My_NFTs Warning:\n" + f"{duplicates} duplicate NFT DNA was detected. This should not be possible. For more information, see:" + f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure" + f"\n{TextColors.RESET}" + ) + + log.info(f"\n\nDuplicate NFT DNA found: {duplicates}") def check_failed_batches(batch_json_save_path): @@ -355,7 +377,8 @@ def raise_error_num_batches(max_nfts, nfts_per_batch): num_batches = max_nfts / nfts_per_batch return num_batches except ZeroDivisionError: - raise ZeroDivisionError( + log.error( + f"\n{traceback.format_exc()}" f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" f"The number of NFTs per Batch must be greater than ZERO." f"Please review your Blender scene and ensure it follows " @@ -364,12 +387,14 @@ def raise_error_num_batches(max_nfts, nfts_per_batch): f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure" f"\n{TextColors.RESET}" ) + raise ZeroDivisionError() def raise_error_zero_combinations(): """Checks if combinations is greater than 0, if so, raises error.""" if get_combinations() == 0: - raise ValueError( + log.error( + f"\n{traceback.format_exc()}" f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" f"The number of all possible combinations is ZERO. 
Please review your Blender scene and ensure it " f"follows the naming conventions and scene structure. For more information, see:\n{TextColors.RESET}" @@ -377,10 +402,13 @@ def raise_error_zero_combinations(): f"\n{TextColors.RESET}" ) + raise ValueError() + def raise_error_num_batches_greater_then(num_batches): if num_batches < 1: - raise ValueError( + log.error( + f"\n{traceback.format_exc()}" f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" f"The number of Batches is less than 1. Please review your Blender scene and ensure it follows " f"the naming conventions and scene structure. For more information, " @@ -388,6 +416,7 @@ def raise_error_num_batches_greater_then(num_batches): f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure" f"\n{TextColors.RESET}" ) + raise ValueError() # Raise Warnings: @@ -397,12 +426,15 @@ def raise_warning_max_nfts(nfts_per_batch, collection_size): """ if nfts_per_batch > collection_size: - raise ValueError( + log.error( + f"\n{traceback.format_exc()}" f"\n{TextColors.WARNING}Blend_My_NFTs Warning:\n" f"The number of NFTs Per Batch you set is smaller than the NFT Collection Size you set." f"\n{TextColors.RESET}" ) + raise ValueError() + def raise_warning_collection_size(dna_list, collection_size): """ @@ -410,18 +442,21 @@ def raise_warning_collection_size(dna_list, collection_size): """ if len(dna_list) < collection_size: - print(f"\n{TextColors.WARNING} \nWARNING: \n" - f"Blend_My_NFTs cannot generate {collection_size} NFTs." - f" Only {len(dna_list)} NFT DNA were generated." + log.warning( + f"\n{traceback.format_exc()}" + f"\n{TextColors.WARNING} \nWARNING: \n" + f"Blend_My_NFTs cannot generate {collection_size} NFTs." + f" Only {len(dna_list)} NFT DNA were generated." 
- f"\nThis might be for a number of reasons:" - f"\n a) Rarity is preventing combinations from being generated (See " - f"https://github.com/torrinworx/Blend_My_NFTs#notes-on-rarity-and-weighted-variants).\n " - f"\n b) Logic is preventing combinations from being generated (See " - f"https://github.com/torrinworx/Blend_My_NFTs#logic).\n " - f"\n c) The number of possible combinations of your NFT collection is too low. Add more Variants or " - f"Attributes to increase the recommended collection size.\n " - f"\n{TextColors.RESET}") + f"\nThis might be for a number of reasons:" + f"\n a) Rarity is preventing combinations from being generated (See " + f"https://github.com/torrinworx/Blend_My_NFTs#notes-on-rarity-and-weighted-variants).\n " + f"\n b) Logic is preventing combinations from being generated (See " + f"https://github.com/torrinworx/Blend_My_NFTs#logic).\n " + f"\n c) The number of possible combinations of your NFT collection is too low. Add more Variants or " + f"Attributes to increase the recommended collection size.\n " + f"\n{TextColors.RESET}" + ) # ======== LOADING ANIMATION ======== # @@ -480,3 +515,39 @@ class Loader: def __exit__(self, exc_type, exc_value, tb): # handle exceptions with those variables ^ self.stop() + + +def activate_logging(): + """ + Used as an intermediate activated at runtime of the following operators: CreateData, ExportNFTs, ResumeFailedBatch, + RefactorBatches, and ExportSettings. Must be independent of 'input' class to be safe, gets variables directly from + bpy. 
+ """ + + log_path = bpy.context.scene.input_tool.log_path + if log_path: + file_handler = logging.FileHandler(os.path.join(log_path, 'BMNFTs_Log.txt'), 'a') + else: + file_handler = logging.FileHandler(os.path.join(tempfile.gettempdir(), 'BMNFTs_Log.txt'), 'a') + + formatter = logging.Formatter( + '[%(asctime)s] [%(levelname)s] [%(filename)s > %(funcName)s() > Line:%(lineno)d]\n%(message)s\n' + ) + file_handler.setFormatter(formatter) + + log = logging.getLogger() + for handler in log.handlers[:]: + if isinstance(handler, logging.FileHandler): + log.removeHandler(handler) + if isinstance(handler, logging.StreamHandler): + log.removeHandler(handler) + log.addHandler(file_handler) + + # Record log to console: + console_handler = logging.StreamHandler(sys.stdout) + log.addHandler(console_handler) + + if bpy.context.scene.input_tool.enable_debug: + logging.getLogger().setLevel(logging.DEBUG) + else: + logging.getLogger().setLevel(logging.INFO) diff --git a/main/intermediate.py b/main/intermediate.py index 0088748..e44ffc7 100644 --- a/main/intermediate.py +++ b/main/intermediate.py @@ -1,9 +1,14 @@ +import logging + import bpy import json from main import dna_generator, exporter -# TODO: migrate this code to the exporter.py to simplify render process into one file. +log = logging.getLogger(__name__) + +# TODO: migrate this code to the dna_generator.py(send_to_record) and exporter.py(render_and_save) to simplify render +# process into one file. def send_to_record(input, reverse_order=False): @@ -12,7 +17,10 @@ def send_to_record(input, reverse_order=False): input.logic_file = json.load(open(input.logic_file)) if input.enable_logic_json and not input.logic_file: - print({'ERROR'}, f"No Logic.json file path set. Please set the file path to your Logic.json file.") + log.error( + f"No Logic.json file path set. Please set the file path to your Logic.json file." 
+ ) + raise if not input.enable_logic_json: scn = bpy.context.scene @@ -66,19 +74,21 @@ def render_and_save_nfts(input, reverse_order=False): for i in range(scn.custom_metadata_fields_index, -1, -1): item = scn.custom_metadata_fields[i] if item.field_name in list(input.custom_fields.keys()): - raise ValueError( - f"A duplicate of '{item.field_name}' was found. Please ensure all Custom Metadata field Names " - f"are unique." + log.error( + f"A duplicate of '{item.field_name}' was found. Ensure all Custom Metadata field " + f"Names are unique." ) + raise ValueError() else: input.custom_fields[item.field_name] = item.field_value else: for item in scn.custom_metadata_fields: if item.field_name in list(input.custom_fields.keys()): - raise ValueError( - f"A duplicate of '{item.field_name}' was found. Please ensure all Custom Metadata field Names " - f"are unique." + log.error( + f"A duplicate of '{item.field_name}' was found. Ensure all Custom Metadata field " + f"Names are unique." ) + raise ValueError() else: input.custom_fields[item.field_name] = item.field_value diff --git a/main/logic.py b/main/logic.py index 7f2b0de..17331f2 100644 --- a/main/logic.py +++ b/main/logic.py @@ -3,10 +3,14 @@ # dna_generator.py import random +import logging +import traceback import collections from .helpers import TextColors +log = logging.getLogger(__name__) + def reconstruct_dna(deconstructed_dna): reconstructed_dna = "" @@ -116,22 +120,26 @@ def apply_rules_to_dna(hierarchy, deconstructed_dna, if_dict, result_dict, resul elif not if_zero_bool: variant_num = random.choices(number_list_of_i, weights=rarity_list_of_i, k=1) except IndexError: - raise IndexError( - f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" - f"An issue was found within the Attribute collection '{a}'. 
For more information on " - f"Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + log.error( + f"\n{traceback.format_exc()}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"An issue was found within the Attribute collection '{a}'. For more information on " + f"Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) + raise IndexError() else: try: variant_num = random.choices(number_list_of_i, k=1) except IndexError: - raise IndexError( - f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" - f"An issue was found within the Attribute collection '{a}'. For more information on " - f"Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + log.error( + f"\n{traceback.format_exc()}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"An issue was found within the Attribute collection '{a}'. 
For more information on " + f"Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) + raise IndexError() deconstructed_dna[int(attribute_index)] = str(variant_num[0]) return deconstructed_dna @@ -281,7 +289,7 @@ def logicafy_dna_single(hierarchy, single_dna, logic_file, enable_rarity): result_dict_type, ) if violates_rule: - # print(f"======={deconstructed_dna} VIOLATES RULE======") + log.debug(f"======={deconstructed_dna} VIOLATES RULE======") deconstructed_dna = apply_rules_to_dna( hierarchy, diff --git a/main/material_generator.py b/main/material_generator.py index 5dc2d89..9a6374f 100644 --- a/main/material_generator.py +++ b/main/material_generator.py @@ -5,8 +5,12 @@ import json import random +import logging +import traceback from .helpers import TextColors +log = logging.getLogger(__name__) + def select_material(material_list, variant, enable_rarity): """Selects a material from a passed material list. """ @@ -39,22 +43,26 @@ def select_material(material_list, variant, enable_rarity): elif not if_zero_bool: selected_material = random.choices(material_list_of_i, weights=rarity_list_of_i, k=1) except IndexError: - raise IndexError( - f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" - f"An issue was found within the Material List of the Variant collection '{variant}'. For more " - f"information on Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + log.error( + f"\n{traceback.format_exc()}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"An issue was found within the Material List of the Variant collection '{variant}'. 
For more " + f"information on Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) + raise IndexError() else: try: selected_material = random.choices(material_list_of_i, k=1) except IndexError: - raise IndexError( - f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" - f"An issue was found within the Material List of the Variant collection '{variant}'. For more " - f"information on Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + log.error( + f"\n{traceback.format_exc()}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"An issue was found within the Material List of the Variant collection '{variant}'. For more " + f"information on Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) + raise IndexError() return selected_material[0], material_list diff --git a/main/refactorer.py b/main/refactorer.py index b29f1c7..41350b7 100644 --- a/main/refactorer.py +++ b/main/refactorer.py @@ -4,9 +4,12 @@ import os import json import shutil +import logging from .helpers import remove_file_by_extension +log = logging.getLogger(__name__) + def reformat_nft_collection(refactor_panel_input): complete_coll_path = os.path.join(refactor_panel_input.save_path, "Blend_My_NFTs Output", "Complete_Collection") @@ -41,6 +44,6 @@ def reformat_nft_collection(refactor_panel_input): with open(os.path.join(complete_coll_path, "collection_info.json"), 'w') as outfile: outfile.write(collection_info + '\n') - print(f"All NFT files stored and sorted to the Complete_Collection folder in {refactor_panel_input.save_path}") + log.info(f"All NFT files stored and sorted to the Complete_Collection folder in {refactor_panel_input.save_path}") shutil.rmtree(refactor_panel_input.nft_batch_save_path)