diff --git a/UILists/Custom_Metadata_UIList.py b/UILists/custom_metadata_ui_list.py similarity index 99% rename from UILists/Custom_Metadata_UIList.py rename to UILists/custom_metadata_ui_list.py index db88f25..54b4575 100644 --- a/UILists/Custom_Metadata_UIList.py +++ b/UILists/custom_metadata_ui_list.py @@ -11,6 +11,7 @@ from bpy.types import (Operator, PropertyGroup, UIList) + # ======== Operators ======== # class CUSTOM_OT_custom_metadata_fields_actions(Operator): """Move items up and down, add and remove""" @@ -103,6 +104,7 @@ class CUSTOM_UL_custom_metadata_fields_items(UIList): def invoke(self, context, event): pass + # ======== Property Collection ======== # class CUSTOM_custom_metadata_fields_objectCollection(PropertyGroup): # name: StringProperty() -> Instantiated by default diff --git a/UILists/Logic_UIList.py b/UILists/logic_ui_list.py similarity index 99% rename from UILists/Logic_UIList.py rename to UILists/logic_ui_list.py index 0f82f42..43c2f7f 100644 --- a/UILists/Logic_UIList.py +++ b/UILists/logic_ui_list.py @@ -11,6 +11,7 @@ from bpy.types import (Operator, PropertyGroup, UIList) + # ======== Operators ======== # class CUSTOM_OT_logic_actions(Operator): """Move items up and down, add and remove""" diff --git a/__init__.py b/__init__.py index 0217ead..f1c4b51 100644 --- a/__init__.py +++ b/__init__.py @@ -1,26 +1,27 @@ bl_info = { "name": "Blend_My_NFTs", - "author": "Torrin Leonard, This Cozy Studio Inc", - "version": (4, 5, 0), + "author": "Torrin Leonard, This Cozy Studio Inc.", + "version": (4, 5, 1), "blender": (3, 2, 2), "location": "View3D", - "description": "A free and opensource Blender add-on that enables you to create thousands of unique images, animations, and 3D models.", + "description": "A free and opensource Blender add-on that enables you to create thousands of unique images, " + "animations, and 3D models.", "support": "COMMUNITY", "doc_url": "https://github.com/torrinworx/Blend_My_NFTs", "tracker_url": 
"https://github.com/torrinworx/Blend_My_NFTs/issues/new", "category": "Development", } -BMNFTS_VERSION = "v4.5.0" -LAST_UPDATED = "12:34AM, Aug 11th, 2022" +BMNFTS_VERSION = "v4.5.1" +LAST_UPDATED = "01:02PM, Aug 24th, 2022" # ======== Import handling ======== # +# Blender modules: import bpy from bpy.app.handlers import persistent -from bpy.props import (IntProperty, - BoolProperty, - CollectionProperty) +from bpy.props import (IntProperty, BoolProperty, CollectionProperty) + # Python modules: import os import sys @@ -34,57 +35,52 @@ from datetime import datetime, timezone # "a little hacky bs" - matt159 ;) sys.path.append(os.path.dirname(os.path.realpath(__file__))) -# Local file imports: +# Local modules: from main import \ - Checks, \ - DNA_Generator, \ - Exporter, \ - get_combinations, \ - HeadlessUtil, \ - Intermediate, \ - loading_animation, \ - Logic, \ - Material_Generator, \ - Metadata, \ - Rarity, \ - Refactorer + helpers, \ + dna_generator, \ + exporter, \ + headless_util, \ + intermediate, \ + logic, \ + material_generator, \ + metadata_templates, \ + refactorer from UILists import \ - Custom_Metadata_UIList, \ - Logic_UIList + custom_metadata_ui_list, \ + logic_ui_list +# Refresh Locals for development: if "bpy" in locals(): modules = { - "Checks": Checks, - "DNA_Generator": DNA_Generator, - "Exporter": Exporter, - "get_combinations": get_combinations, - "HeadlessUtil": HeadlessUtil, - "loading_animation": loading_animation, - "Intermediate": Intermediate, - "Logic": Logic, - "Material_Generator": Material_Generator, - "Metadata": Metadata, - "Rarity": Rarity, - "Refactorer": Refactorer, - "Custom_Metadata_UIList": Custom_Metadata_UIList, - "Logic_UIList": Logic_UIList, + "helpers": helpers, + "dna_generator": dna_generator, + "exporter": exporter, + "headless_util": headless_util, + "intermediate": intermediate, + "logic": logic, + "material_generator": material_generator, + "metadata_templates": metadata_templates, + "refactorer": refactorer, + 
"custom_metadata_ui_list": custom_metadata_ui_list, + "logic_ui_list": logic_ui_list, } for i in modules: if i in locals(): importlib.reload(modules[i]) -# ======== Persistant UI Refresh ======== # - +# ======== Persistent UI Refresh ======== # # Used for updating text and buttons in UI panels + combinations: int = 0 recommended_limit: int = 0 dt = datetime.now(timezone.utc).astimezone() # Date Time in UTC local @persistent -def Refresh_UI(dummy1, dummy2): +def refresh_ui(dummy1, dummy2): """ Refreshes the UI upon user interacting with Blender (using depsgraph_update_post handler). Might be a better handler to use. @@ -92,7 +88,7 @@ def Refresh_UI(dummy1, dummy2): global combinations global recommended_limit - combinations = (get_combinations.get_combinations()) + combinations = (helpers.get_combinations()) recommended_limit = int(round(combinations / 2)) # Add panel classes that require refresh to this refresh_panels tuple: @@ -111,124 +107,135 @@ def Refresh_UI(dummy1, dummy2): redraw_panel(refresh_panel_classes) -bpy.app.handlers.depsgraph_update_post.append(Refresh_UI) +bpy.app.handlers.depsgraph_update_post.append(refresh_ui) # ======== Defining BMNFTs Data ======== # @dataclass class BMNFTData: - nftName: str + nft_name: str save_path: str - nftsPerBatch: int - batchToGenerate: int - collectionSize: int + nfts_per_batch: int + batch_to_generate: int + collection_size: int - Blend_My_NFTs_Output: str + blend_my_nfts_output: str batch_json_save_path: str - nftBatch_save_path: str + nft_batch_save_path: str - enableImages: bool - imageFileFormat: str + enable_images: bool + image_file_format: str - enableAnimations: bool - animationFileFormat: str + enable_animations: bool + animation_file_format: str - enableModelsBlender: bool - modelFileFormat: str + enable_models: bool + model_file_format: str - enableCustomFields: bool + enable_custom_fields: bool - cardanoMetaDataBool: bool - solanaMetaDataBool: bool - erc721MetaData: bool + cardano_metadata_bool: bool + 
solana_metadata_bool: bool + erc721_metadata: bool cardano_description: str solana_description: str erc721_description: str - enableMaterials: bool - materialsFile: str + enable_materials: bool + materials_file: str - enableLogic: bool - enable_Logic_Json: bool - logicFile: str + enable_logic: bool + enable_logic_json: bool + logic_file: str - enableRarity: bool + enable_rarity: bool - enableAutoShutdown: bool + enable_auto_shutdown: bool - specify_timeBool: bool + specify_time_bool: bool hours: int minutes: int - emailNotificationBool: bool + email_notification_bool: bool sender_from: str email_password: str receiver_to: str - custom_Fields: dict = None + enable_debug: bool + log_path: str + + enable_dry_run: str + + custom_fields: dict = None fail_state: Any = False failed_batch: Any = None failed_dna: Any = None failed_dna_index: Any = None def __post_init__(self): - self.custom_Fields = {} + self.custom_fields = {} -def getBMNFTData(): +def get_bmnft_data(): _save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path) _Blend_My_NFTs_Output, _batch_json_save_path, _nftBatch_save_path = make_directories(_save_path) + # IMPORTANT: if a new directory variable is ever added, use 'bpy.path.abspath' instead of 'os.path.abspath'. data = BMNFTData( - nftName=bpy.context.scene.input_tool.nftName, - save_path=_save_path, - nftsPerBatch=bpy.context.scene.input_tool.nftsPerBatch, - batchToGenerate=bpy.context.scene.input_tool.batchToGenerate, - collectionSize=bpy.context.scene.input_tool.collectionSize, + nft_name=bpy.context.scene.input_tool.nft_name, + save_path=bpy.path.abspath(_save_path), # Converting from Blender's relative path system to absolute. 
+ nfts_per_batch=bpy.context.scene.input_tool.nfts_per_batch, + batch_to_generate=bpy.context.scene.input_tool.batch_to_generate, + collection_size=bpy.context.scene.input_tool.collection_size, - enableRarity=bpy.context.scene.input_tool.enableRarity, + enable_rarity=bpy.context.scene.input_tool.enable_rarity, - Blend_My_NFTs_Output=_Blend_My_NFTs_Output, + blend_my_nfts_output=_Blend_My_NFTs_Output, batch_json_save_path=_batch_json_save_path, - nftBatch_save_path=_nftBatch_save_path, + nft_batch_save_path=_nftBatch_save_path, - enableLogic=bpy.context.scene.input_tool.enableLogic, - enable_Logic_Json=bpy.context.scene.input_tool.enable_Logic_Json, - logicFile=bpy.context.scene.input_tool.logicFile, + enable_logic=bpy.context.scene.input_tool.enable_logic, + enable_logic_json=bpy.context.scene.input_tool.enable_logic_json, + logic_file=bpy.path.abspath(bpy.context.scene.input_tool.logic_file), - enableImages=bpy.context.scene.input_tool.imageBool, - imageFileFormat=bpy.context.scene.input_tool.imageEnum, + enable_images=bpy.context.scene.input_tool.image_bool, + image_file_format=bpy.context.scene.input_tool.image_enum, - enableAnimations=bpy.context.scene.input_tool.animationBool, - animationFileFormat=bpy.context.scene.input_tool.animationEnum, + enable_animations=bpy.context.scene.input_tool.animation_bool, + animation_file_format=bpy.context.scene.input_tool.animation_enum, - enableModelsBlender=bpy.context.scene.input_tool.modelBool, - modelFileFormat=bpy.context.scene.input_tool.modelEnum, + enable_models=bpy.context.scene.input_tool.model_bool, + model_file_format=bpy.context.scene.input_tool.model_enum, - enableCustomFields=bpy.context.scene.input_tool.enableCustomFields, + enable_custom_fields=bpy.context.scene.input_tool.enable_custom_fields, - cardanoMetaDataBool=bpy.context.scene.input_tool.cardanoMetaDataBool, - solanaMetaDataBool=bpy.context.scene.input_tool.solanaMetaDataBool, - erc721MetaData=bpy.context.scene.input_tool.erc721MetaData, + 
cardano_metadata_bool=bpy.context.scene.input_tool.cardano_metadata_bool, + solana_metadata_bool=bpy.context.scene.input_tool.solana_metadata_bool, + erc721_metadata=bpy.context.scene.input_tool.erc721_metadata, cardano_description=bpy.context.scene.input_tool.cardano_description, solana_description=bpy.context.scene.input_tool.solana_description, erc721_description=bpy.context.scene.input_tool.erc721_description, - enableMaterials=bpy.context.scene.input_tool.enableMaterials, - materialsFile=bpy.path.abspath(bpy.context.scene.input_tool.materialsFile), + enable_materials=bpy.context.scene.input_tool.enable_materials, + materials_file=bpy.path.abspath(bpy.context.scene.input_tool.materials_file), - enableAutoShutdown=bpy.context.scene.input_tool.enableAutoShutdown, + enable_auto_shutdown=bpy.context.scene.input_tool.enable_auto_shutdown, - specify_timeBool=bpy.context.scene.input_tool.specify_timeBool, + specify_time_bool=bpy.context.scene.input_tool.specify_time_bool, hours=bpy.context.scene.input_tool.hours, minutes=bpy.context.scene.input_tool.minutes, - emailNotificationBool=bpy.context.scene.input_tool.emailNotificationBool, + email_notification_bool=bpy.context.scene.input_tool.email_notification_bool, sender_from=bpy.context.scene.input_tool.sender_from, email_password=bpy.context.scene.input_tool.email_password, receiver_to=bpy.context.scene.input_tool.receiver_to, + + enable_debug=bpy.context.scene.input_tool.enable_debug, + log_path=bpy.path.abspath(bpy.context.scene.input_tool.log_path), + + enable_dry_run=bpy.context.scene.input_tool.enable_dry_run ) return data @@ -251,7 +258,7 @@ def make_directories(save_path): return Blend_My_NFTs_Output, batch_json_save_path, nftBatch_save_path -def runAsHeadless(): +def run_as_headless(): """ For use when running from the command line. 
""" @@ -267,35 +274,35 @@ def runAsHeadless(): print('Using {} devices for rendering!'.format(cprefs.get_num_gpu_devices())) - def dumpSettings(settings): - output = ( - f"nftName={settings.nftName}\n" - f"collectionSize={str(settings.collectionSize)}\n" - f"nftsPerBatch={str(settings.nftsPerBatch)}\n" - f"save_path={settings.save_path}\n" - f"enableRarity={(settings.enableRarity)}\n" - f"enableLogic={str(settings.enableLogic)}\n" - f"imageBool={str(settings.imageBool)}\n" - f"imageEnum={settings.imageEnum}\n" - f"animationBool={str(settings.animationBool)}\n" - f"animationEnum={settings.animationEnum}\n" - f"modelBool={str(settings.modelBool)}\n" - f"modelEnum={settings.modelEnum}\n" - f"batchToGenerate={str(settings.batchToGenerate)}\n" - f"cardanoMetaDataBool={str(settings.cardanoMetaDataBool)}\n" - f"cardano_description={settings.cardano_description}\n" - f"erc721MetaData={str(settings.erc721MetaData)}\n" - f"erc721_description={settings.erc721_description}\n" - f"solanaMetaDataBool={str(settings.solanaMetaDataBool)}\n" - f"solana_description={settings.solana_description}\n" - f"enableCustomFields={str(settings.enableCustomFields)}\n" - f"customfieldsFile={settings.customfieldsFile}\n" - f"enableMaterials={str(settings.customfieldsFile)}\n" - f"materialsFile={settings.materialsFile}\n" - ) - print(output) + # def dumpSettings(settings): + # output = ( + # f"nft_name={settings.nft_name}\n" + # f"collection_size={str(settings.collection_size)}\n" + # f"nfts_per_batch={str(settings.nfts_per_batch)}\n" + # f"save_path={settings.save_path}\n" + # f"enable_rarity={(settings.enable_rarity)}\n" + # f"enable_logic={str(settings.enable_logic)}\n" + # f"image_bool={str(settings.image_bool)}\n" + # f"image_enum={settings.image_enum}\n" + # f"animation_bool={str(settings.animation_bool)}\n" + # f"animation_enum={settings.animation_enum}\n" + # f"model_bool={str(settings.model_bool)}\n" + # f"model_enum={settings.model_enum}\n" + # 
f"batch_to_generate={str(settings.batch_to_generate)}\n" + # f"cardano_metadata_bool={str(settings.cardano_metadata_bool)}\n" + # f"cardano_description={settings.cardano_description}\n" + # f"erc721_metadata={str(settings.erc721_metadata)}\n" + # f"erc721_description={settings.erc721_description}\n" + # f"solana_metadata_bool={str(settings.solana_metadata_bool)}\n" + # f"solana_description={settings.solana_description}\n" + # f"enable_custom_fields={str(settings.enable_custom_fields)}\n" + # f"custom_fields_file={settings.custom_fields_file}\n" + # f"enable_materials={str(settings.custom_fields_file)}\n" + # f"materials_file={settings.materials_file}\n" + # ) + # print(output) - args, parser = HeadlessUtil.getPythonArgs() + args, parser = headless_util.get_python_args() settings = bpy.context.scene.input_tool @@ -308,61 +315,69 @@ def runAsHeadless(): # print(pairs) - settings.nftName = pairs[0][1] - settings.collectionSize = int(pairs[1][1]) - settings.nftsPerBatch = int(pairs[2][1]) + settings.nft_name = pairs[0][1] + settings.collection_size = int(pairs[1][1]) + settings.nfts_per_batch = int(pairs[2][1]) settings.save_path = pairs[3][1] - settings.enableRarity = pairs[4][1] == 'True' - settings.enableLogic = pairs[5][1] == 'True' + settings.enable_rarity = pairs[4][1] == 'True' + settings.enable_logic = pairs[5][1] == 'True' - settings.enableLogicJson = pairs[6][1] == 'True' + settings.enable_logic_json = pairs[6][1] == 'True' - settings.logicFile = pairs[7][1] - settings.imageBool = pairs[8][1] == 'True' - settings.imageEnum = pairs[9][1] - settings.animationBool = pairs[10][1] == 'True' - settings.animationEnum = pairs[11][1] 
+ settings.model_bool = pairs[12][1] == 'True' + settings.model_enum = pairs[13][1] + settings.batch_to_generate = int(pairs[14][1]) + settings.cardano_metadata_bool = pairs[15][1] == 'True' settings.cardano_description = pairs[16][1] - settings.erc721MetaData = pairs[17][1] == 'True' + settings.erc721_metadata = pairs[17][1] == 'True' settings.erc721_description = pairs[18][1] - settings.solanaMetaDataBool = pairs[19][1] == 'True' + settings.solana_metadata_bool = pairs[19][1] == 'True' - settings.solanaDescription = pairs[20][1] + settings.solana_description = pairs[20][1] - settings.enableCustomFields = pairs[21][1] == 'True' - settings.customfieldsFile = pairs[22][1] - settings.enableMaterials = pairs[23][1] == 'True' - settings.materialsFile = pairs[24][1] + settings.enable_custom_fields = pairs[21][1] == 'True' + settings.custom_fields_file = pairs[22][1] + settings.enable_materials = pairs[23][1] == 'True' + settings.materials_file = pairs[24][1] if args.save_path: settings.save_path = args.save_path if args.batch_number: - settings.batchToGenerate = args.batch_number + settings.batch_to_generate = args.batch_number - input = getBMNFTData() + input = get_bmnft_data() if args.batch_data_path: input.batch_json_save_path = args.batch_data_path if args.operation == 'create-dna': - Intermediate.send_To_Record_JSON(input) + intermediate.send_to_record(input) elif args.operation == 'generate-nfts': - Intermediate.render_and_save_NFTs(input) + intermediate.render_and_save_nfts(input) elif args.operation == 'refactor-batches': - Refactorer.reformatNFTCollection(input) + refactorer.reformat_nft_collection(input) # ======== User input Property Group ======== # class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): # Create NFT Data Panel: - nftName: bpy.props.StringProperty(name="NFT Name") + nft_name: bpy.props.StringProperty(name="NFT Name") - collectionSize: bpy.props.IntProperty(name="NFT Collection Size", default=1, min=1) # max=(combinations - offset) - nftsPerBatch: bpy.props.IntProperty(name="NFTs Per 
Batch", default=1, min=1) # max=(combinations - offset) + collection_size: bpy.props.IntProperty( + name="NFT Collection Size", + default=1, + min=1 + ) # max=(combinations - offset) + nfts_per_batch: bpy.props.IntProperty( + name="NFTs Per Batch", + default=1, + min=1 + ) # max=(combinations - offset) save_path: bpy.props.StringProperty( name="Save Path", @@ -372,11 +387,17 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): subtype="DIR_PATH" ) - enableRarity: bpy.props.BoolProperty(name="Enable Rarity") + enable_rarity: bpy.props.BoolProperty( + name="Enable Rarity" + ) - enableLogic: bpy.props.BoolProperty(name="Enable Logic") - enable_Logic_Json: bpy.props.BoolProperty(name="Use Logic.json instead") - logicFile: bpy.props.StringProperty( + enable_logic: bpy.props.BoolProperty( + name="Enable Logic" + ) + enable_logic_json: bpy.props.BoolProperty( + name="Use Logic.json instead" + ) + logic_file: bpy.props.StringProperty( name="Logic File Path", description="Path where Logic.json is located.", default="", @@ -384,8 +405,10 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): subtype="FILE_PATH" ) - enableMaterials: bpy.props.BoolProperty(name="Enable Materials") - materialsFile: bpy.props.StringProperty( + enable_materials: bpy.props.BoolProperty( + name="Enable Materials" + ) + materials_file: bpy.props.StringProperty( name="Materials File", description="Path where Materials.json is located.", default="", @@ -394,8 +417,10 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): ) # Generate NFTs Panel: - imageBool: bpy.props.BoolProperty(name="Image") - imageEnum: bpy.props.EnumProperty( + image_bool: bpy.props.BoolProperty( + name="Image" + ) + image_enum: bpy.props.EnumProperty( name="Image File Format", description="Select Image file format", items=[ @@ -404,8 +429,10 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): ] ) - animationBool: bpy.props.BoolProperty(name="Animation") - animationEnum: 
bpy.props.EnumProperty( + animation_bool: bpy.props.BoolProperty( + name="Animation" + ) + animation_enum: bpy.props.EnumProperty( name="Animation File Format", description="Select Animation file format", items=[ @@ -418,8 +445,10 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): ] ) - modelBool: bpy.props.BoolProperty(name="3D Model") - modelEnum: bpy.props.EnumProperty( + model_bool: bpy.props.BoolProperty( + name="3D Model" + ) + model_enum: bpy.props.EnumProperty( name="3D Model File Format", description="Select 3D Model file format", items=[ @@ -436,21 +465,38 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): ] ) - batchToGenerate: bpy.props.IntProperty(name="Batch To Generate", default=1, - min=1) + batch_to_generate: bpy.props.IntProperty( + name="Batch To Generate", + default=1, + min=1 + ) # Refactor Batches & Create Metadata Panel: - cardanoMetaDataBool: bpy.props.BoolProperty(name="Cardano Cip") - cardano_description: bpy.props.StringProperty(name="Cardano description") + cardano_metadata_bool: bpy.props.BoolProperty( + name="Cardano Cip" + ) + cardano_description: bpy.props.StringProperty( + name="Cardano description" + ) - solanaMetaDataBool: bpy.props.BoolProperty(name="Solana Metaplex") - solana_description: bpy.props.StringProperty(name="Solana description") + solana_metadata_bool: bpy.props.BoolProperty( + name="Solana Metaplex" + ) + solana_description: bpy.props.StringProperty( + name="Solana description" + ) - erc721MetaData: bpy.props.BoolProperty(name="ERC721") - erc721_description: bpy.props.StringProperty(name="ERC721 description") + erc721_metadata: bpy.props.BoolProperty( + name="ERC721" + ) + erc721_description: bpy.props.StringProperty( + name="ERC721 description" + ) - enableCustomFields: bpy.props.BoolProperty(name="Enable Custom Metadata Fields") - customfieldsFile: bpy.props.StringProperty( + enable_custom_fields: bpy.props.BoolProperty( + name="Enable Custom Metadata Fields" + ) + custom_fields_file: 
bpy.props.StringProperty( name="Custom Fields File", description="Path where Custom_Fields.json is located.", default="", @@ -458,37 +504,76 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup): subtype="FILE_PATH" ) - # TODO: Add 'Other' panel inputs to Headless functionality. - # Other Panel: - enableAutoSave: bpy.props.BoolProperty(name="Auto Save Before Generation", - description="Automatically saves your Blender file when 'Generate NFTs & Create Metadata' button is clicked") + enable_auto_save: bpy.props.BoolProperty( + name="Auto Save Before Generation", + description="Automatically saves your Blender file when 'Generate NFTs & Create Metadata' button is clicked" + ) - # Auto Shutdown: - enableAutoShutdown: bpy.props.BoolProperty(name="Auto Shutdown", - description="Automatically shuts down your computer after a Batch is finished Generating") + enable_auto_shutdown: bpy.props.BoolProperty( + name="Auto Shutdown", + description="Automatically shuts down your computer after a Batch is finished Generating" + ) - specify_timeBool: bpy.props.BoolProperty(name="Shutdown in a Given Amount of Time", - description="Wait a given amount of time after a Batch is generated before Automatic Shutdown") - hours: bpy.props.IntProperty(default=0, min=0) - minutes: bpy.props.IntProperty(default=0, min=0) + specify_time_bool: bpy.props.BoolProperty( + name="Shutdown in a Given Amount of Time", + description="Wait a given amount of time after a Batch is generated before Automatic Shutdown" + ) + hours: bpy.props.IntProperty( + default=0, min=0 + ) + minutes: bpy.props.IntProperty( + default=0, min=0 + ) - # Send Batch Complete Email: - emailNotificationBool: bpy.props.BoolProperty(name="Email Notifications", - description="Receive Email Notifications from Blender once a batch is finished generating") - sender_from: bpy.props.StringProperty(name="From", default="from@example.com") - email_password: bpy.props.StringProperty(name="Password", subtype='PASSWORD') - 
receiver_to: bpy.props.StringProperty(name="To", default="to@example.com") + email_notification_bool: bpy.props.BoolProperty( + name="Email Notifications", + description="Receive Email Notifications from Blender once a batch is finished generating" + ) + sender_from: bpy.props.StringProperty( + name="From", + default="from@example.com" + ) + email_password: bpy.props.StringProperty( + name="Password", + subtype='PASSWORD' + ) + receiver_to: bpy.props.StringProperty( + name="To", + default="to@example.com" + ) + + enable_debug: bpy.props.BoolProperty( + name="Enable Debug Mode", + description="Allows you to run Blend_My_NFTs without generating any content files and enables debugging " + "console messages saved to a BMNFTs_Log.txt file." + ) + log_path: bpy.props.StringProperty( + name="Debug Log Path", + description="Path where BMNFT_Log.txt is located.", + default="", + maxlen=1024, + subtype="FILE_PATH" + ) + + enable_dry_run: bpy.props.BoolProperty( + name="Enable Dry Run", + description="Allows you to run Blend_My_NFTs without generating any content files." + ) # API Panel properties: - apiKey: bpy.props.StringProperty(name="API Key", subtype='PASSWORD') # Test code for future faetures + api_key: bpy.props.StringProperty( + name="API Key", + subtype='PASSWORD' + ) # Test code for future features # ======== Main Operators ======== # -class createData(bpy.types.Operator): +class CreateData(bpy.types.Operator): bl_idname = 'create.data' bl_label = 'Create Data' - bl_description = 'Creates NFT Data. Run after any changes were made to scene. All previous data will be overwritten and cannot be recovered.' + bl_description = 'Creates NFT Data. Run after any changes were made to scene. All previous data will be ' \ + 'overwritten and cannot be recovered.' 
bl_options = {"REGISTER", "UNDO"} reverse_order: BoolProperty( @@ -496,15 +581,17 @@ class createData(bpy.types.Operator): name="Reverse Order") def execute(self, context): - # Handling Custom Fields UIList input: - input = getBMNFTData() + helpers.activate_logging() - if input.enableLogic: - if input.enable_Logic_Json and not input.logicFile: + # Handling Custom Fields UIList input: + input = get_bmnft_data() + + if input.enable_logic: + if input.enable_logic_json and not input.logic_file: self.report({'ERROR'}, f"No Logic.json file path set. Please set the file path to your Logic.json file.") - Intermediate.send_To_Record_JSON(input) + intermediate.send_to_record(input) self.report({'INFO'}, f"NFT Data created!") return {"FINISHED"} @@ -513,7 +600,7 @@ class createData(bpy.types.Operator): return context.window_manager.invoke_confirm(self, event) -class exportNFTs(bpy.types.Operator): +class ExportNFTs(bpy.types.Operator): bl_idname = 'exporter.nfts' bl_label = 'Export NFTs' bl_description = 'Generate and export a given batch of NFTs.' @@ -524,101 +611,109 @@ class exportNFTs(bpy.types.Operator): name="Reverse Order") def execute(self, context): - input = getBMNFTData() - # Handling Custom Fields UIList input: + helpers.activate_logging() - Intermediate.render_and_save_NFTs(input) + input = get_bmnft_data() - self.report({'INFO'}, f"All NFTs generated for batch {input.batchToGenerate}!") + intermediate.render_and_save_nfts(input) + + self.report({'INFO'}, f"All NFTs generated for batch {input.batch_to_generate}!") return {"FINISHED"} -class resume_failed_batch(bpy.types.Operator): +class ResumeFailedBatch(bpy.types.Operator): bl_idname = 'exporter.resume_nfts' bl_label = 'Resume Failed Batch' bl_description = 'Failed Batch detected. Generate NFTs where the previous batch failed?' 
bl_options = {"REGISTER", "UNDO"} def execute(self, context): + helpers.activate_logging() + _save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path) _Blend_My_NFTs_Output, _batch_json_save_path, _nftBatch_save_path = make_directories(_save_path) - _batchToGenerate = bpy.context.scene.input_tool.batchToGenerate + _batchToGenerate = bpy.context.scene.input_tool.batch_to_generate file_name = os.path.join(_batch_json_save_path, "Batch{}.json".format(_batchToGenerate)) - batchData = json.load(open(file_name)) + batch_data = json.load(open(file_name)) - _fail_state, _failed_batch, _failed_dna, _failed_dna_index = Checks.check_FailedBatches(_batch_json_save_path) + _fail_state, _failed_batch, _failed_dna, _failed_dna_index = helpers.check_failed_batches(_batch_json_save_path) - render_settings = batchData["Generation Save"][-1]["Render_Settings"] + render_settings = batch_data["Generation Save"][-1]["Render_Settings"] input = BMNFTData( - nftName=render_settings["nftName"], + nft_name=render_settings["nft_name"], save_path=_save_path, - nftsPerBatch=render_settings["nftsPerBatch"], - batchToGenerate=render_settings["batchToGenerate"], - collectionSize=render_settings["collectionSize"], + nfts_per_batch=render_settings["nfts_per_batch"], + batch_to_generate=render_settings["batch_to_generate"], + collection_size=render_settings["collection_size"], - Blend_My_NFTs_Output=_Blend_My_NFTs_Output, + blend_my_nfts_output=_Blend_My_NFTs_Output, batch_json_save_path=_batch_json_save_path, - nftBatch_save_path=render_settings["nftBatch_save_path"], + nft_batch_save_path=render_settings["nft_batch_save_path"], - enableImages=render_settings["enableImages"], - imageFileFormat=render_settings["imageFileFormat"], + enable_images=render_settings["enable_images"], + image_file_format=render_settings["image_file_format"], - enableAnimations=render_settings["enableAnimations"], - animationFileFormat=render_settings["animationFileFormat"], + 
enable_animations=render_settings["enable_animations"], + animation_file_format=render_settings["animation_file_format"], - enableModelsBlender=render_settings["enableModelsBlender"], - modelFileFormat=render_settings["modelFileFormat"], + enable_models=render_settings["enable_models"], + model_file_format=render_settings["model_file_format"], - enableCustomFields=render_settings["enableCustomFields"], + enable_custom_fields=render_settings["enable_custom_fields"], - cardanoMetaDataBool=render_settings["cardanoMetaDataBool"], - solanaMetaDataBool=render_settings["solanaMetaDataBool"], - erc721MetaData=render_settings["erc721MetaData"], + cardano_metadata_bool=render_settings["cardano_metadata_bool"], + solana_metadata_bool=render_settings["solana_metadata_bool"], + erc721_metadata=render_settings["erc721_metadata"], cardano_description=render_settings["cardano_description"], solana_description=render_settings["solana_description"], erc721_description=render_settings["erc721_description"], - enableMaterials=render_settings["enableMaterials"], - materialsFile=render_settings["materialsFile"], + enable_materials=render_settings["enable_materials"], + materials_file=render_settings["materials_file"], - enableLogic=render_settings["enableLogic"], - enable_Logic_Json=render_settings["enable_Logic_Json"], - logicFile=render_settings["logicFile"], + enable_logic=render_settings["enable_logic"], + enable_logic_json=render_settings["enable_logic_json"], + logic_file=render_settings["logic_file"], - enableRarity=render_settings["enableRarity"], + enable_rarity=render_settings["enable_rarity"], - enableAutoShutdown=render_settings["enableAutoShutdown"], + enable_auto_shutdown=render_settings["enable_auto_shutdown"], - specify_timeBool=render_settings["specify_timeBool"], + specify_time_bool=render_settings["specify_time_bool"], hours=render_settings["hours"], minutes=render_settings["minutes"], - emailNotificationBool=render_settings["emailNotificationBool"], + 
email_notification_bool=render_settings["email_notification_bool"], sender_from=render_settings["sender_from"], email_password=render_settings["email_password"], receiver_to=render_settings["receiver_to"], + enable_debug=render_settings["enable_debug"], + log_path=render_settings["log_path"], + + enable_dry_run=render_settings["enable_dry_run"], + fail_state=_fail_state, failed_batch=_failed_batch, failed_dna=_failed_dna, failed_dna_index=_failed_dna_index, - custom_Fields=render_settings["custom_Fields"], + custom_fields=render_settings["custom_fields"], ) - Exporter.render_and_save_NFTs(input) + exporter.render_and_save_nfts(input) self.report({'INFO'}, f"Resuming Failed Batch Generation!") return {"FINISHED"} -class refactor_Batches(bpy.types.Operator): +class RefactorBatches(bpy.types.Operator): """Refactor your collection? This action cannot be undone.""" bl_idname = 'refactor.batches' bl_label = 'Refactor your Batches?' @@ -630,15 +725,16 @@ class refactor_Batches(bpy.types.Operator): name="Reverse Order") def execute(self, context): - # Passing info to main functions for refactoring: - Refactorer.reformatNFTCollection(getBMNFTData()) + helpers.activate_logging() + + refactorer.reformat_nft_collection(get_bmnft_data()) return {"FINISHED"} def invoke(self, context, event): return context.window_manager.invoke_confirm(self, event) -class export_settings(bpy.types.Operator): +class ExportSettings(bpy.types.Operator): """Export your settings into a configuration file.""" bl_idname = 'export.settings' bl_label = 'Export Settings' @@ -646,6 +742,8 @@ class export_settings(bpy.types.Operator): bl_options = {"REGISTER", "UNDO"} def execute(self, context): + helpers.activate_logging() + save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path) filename = "config.cfg" @@ -657,51 +755,51 @@ class export_settings(bpy.types.Operator): "#when running Blend_My_NFTs in a headless environment.\n" "\n" "#The name of your nft project\n" - 
f"nftName={settings.nftName}\n" + f"nft_name={settings.nft_name}\n" "\n" "#NFT Collection Size\n" - f"collectionSize={settings.collectionSize}\n" + f"collection_size={settings.collection_size}\n" "\n" "#The number of NFTs to generate per batch\n" - f"nftsPerBatch={str(settings.nftsPerBatch)}\n" + f"nfts_per_batch={str(settings.nfts_per_batch)}\n" "\n" "#Save path for your NFT files\n" f"save_path={settings.save_path}\n" "\n" "#Enable Rarity\n" - f"enableRarity={(settings.enableRarity)}\n" + f"enable_rarity={settings.enable_rarity}\n" "\n" "#Enable Logic\n" - f"enableLogic={str(settings.enableLogic)}\n" - f"enableLogicJson={str(settings.enable_Logic_Json)}\n" - f"logicFilePath={settings.logicFile}\n" + f"enable_logic={str(settings.enable_logic)}\n" + f"enableLogicJson={str(settings.enable_logic_json)}\n" + f"logicFilePath={settings.logic_file}\n" "\n" "#NFT Media output type(s):\n" - f"imageBool={str(settings.imageBool)}\n" - f"imageEnum={settings.imageEnum}\n" - f"animationBool={str(settings.animationBool)}\n" - f"animationEnum={settings.animationEnum}\n" - f"modelBool={str(settings.modelBool)}\n" - f"modelEnum={settings.modelEnum}\n" + f"image_bool={str(settings.image_bool)}\n" + f"image_enum={settings.image_enum}\n" + f"animation_bool={str(settings.animation_bool)}\n" + f"animation_enum={settings.animation_enum}\n" + f"model_bool={str(settings.model_bool)}\n" + f"model_enum={settings.model_enum}\n" "\n" "#Batch to generate\n" - f"batchToGenerate={str(settings.batchToGenerate)}\n" + f"batch_to_generate={str(settings.batch_to_generate)}\n" "\n" "#Metadata Format\n" - f"cardanoMetaDataBool={str(settings.cardanoMetaDataBool)}\n" + f"cardano_metadata_bool={str(settings.cardano_metadata_bool)}\n" f"cardano_description={settings.cardano_description}\n" - f"erc721MetaData={str(settings.erc721MetaData)}\n" + f"erc721_metadata={str(settings.erc721_metadata)}\n" f"erc721_description={settings.erc721_description}\n" - 
f"solanaMetaDataBool={str(settings.solanaMetaDataBool)}\n" + f"solana_metadata_bool={str(settings.solana_metadata_bool)}\n" f"solana_description={settings.solana_description}\n" "\n" "#Enable Custom Fields\n" - f"enableCustomFields={str(settings.enableCustomFields)}\n" - f"customfieldsFile={settings.customfieldsFile}\n" + f"enable_custom_fields={str(settings.enable_custom_fields)}\n" + f"custom_fields_file={settings.custom_fields_file}\n" "\n" "#Enable Materials\n" - f"enableMaterials={str(settings.enableMaterials)}\n" - f"materialsFile={settings.materialsFile}\n" + f"enable_materials={str(settings.enable_materials)}\n" + f"materials_file={settings.materials_file}\n" ) print(output, file=config) @@ -725,29 +823,29 @@ class BMNFTS_PT_CreateData(bpy.types.Panel): input_tool_scene = scene.input_tool row = layout.row() - row.prop(input_tool_scene, "nftName") + row.prop(input_tool_scene, "nft_name") row = layout.row() layout.label(text=f"Maximum Number Of NFTs: {combinations}") layout.label(text=f"Recommended limit: {recommended_limit}") row = layout.row() - row.prop(input_tool_scene, "collectionSize") + row.prop(input_tool_scene, "collection_size") row = layout.row() - row.prop(input_tool_scene, "nftsPerBatch") + row.prop(input_tool_scene, "nfts_per_batch") row = layout.row() row.prop(input_tool_scene, "save_path") row = layout.row() - row.prop(input_tool_scene, "enableRarity") + row.prop(input_tool_scene, "enable_rarity") row = layout.row() - row.prop(input_tool_scene, "enableLogic") + row.prop(input_tool_scene, "enable_logic") # Logic_UIList implementation: - if bpy.context.scene.input_tool.enableLogic: + if bpy.context.scene.input_tool.enable_logic: layout = self.layout scn = bpy.context.scene @@ -771,18 +869,18 @@ class BMNFTS_PT_CreateData(bpy.types.Panel): row.label(text=f"*Field Names must be unique.") row = layout.row() - row.prop(input_tool_scene, "enable_Logic_Json") + row.prop(input_tool_scene, "enable_logic_json") - if 
bpy.context.scene.input_tool.enable_Logic_Json: + if bpy.context.scene.input_tool.enable_logic_json: row = layout.row() - row.prop(input_tool_scene, "logicFile") + row.prop(input_tool_scene, "logic_file") row = layout.row() - row.prop(input_tool_scene, "enableMaterials") + row.prop(input_tool_scene, "enable_materials") - if bpy.context.scene.input_tool.enableMaterials: + if bpy.context.scene.input_tool.enable_materials: row = layout.row() - row.prop(input_tool_scene, "materialsFile") + row.prop(input_tool_scene, "materials_file") row = layout.row() self.layout.operator("create.data", icon='DISCLOSURE_TRI_RIGHT', text="Create Data") @@ -806,26 +904,26 @@ class BMNFTS_PT_GenerateNFTs(bpy.types.Panel): layout.label(text="NFT Media files:") row = layout.row() - row.prop(input_tool_scene, "imageBool") - if bpy.context.scene.input_tool.imageBool: - row.prop(input_tool_scene, "imageEnum") + row.prop(input_tool_scene, "image_bool") + if bpy.context.scene.input_tool.image_bool: + row.prop(input_tool_scene, "image_enum") row = layout.row() - row.prop(input_tool_scene, "animationBool") - if bpy.context.scene.input_tool.animationBool: - row.prop(input_tool_scene, "animationEnum") + row.prop(input_tool_scene, "animation_bool") + if bpy.context.scene.input_tool.animation_bool: + row.prop(input_tool_scene, "animation_enum") row = layout.row() - row.prop(input_tool_scene, "modelBool") - if bpy.context.scene.input_tool.modelBool: - row.prop(input_tool_scene, "modelEnum") + row.prop(input_tool_scene, "model_bool") + if bpy.context.scene.input_tool.model_bool: + row.prop(input_tool_scene, "model_enum") row = layout.row() layout.label(text="Meta Data format:") row = layout.row() - row.prop(input_tool_scene, "cardanoMetaDataBool") - if bpy.context.scene.input_tool.cardanoMetaDataBool: + row.prop(input_tool_scene, "cardano_metadata_bool") + if bpy.context.scene.input_tool.cardano_metadata_bool: row = layout.row() row.prop(input_tool_scene, "cardano_description") @@ -834,8 +932,8 @@ 
class BMNFTS_PT_GenerateNFTs(bpy.types.Panel): icon='URL').url = "https://cips.cardano.org/cips/cip25/" row = layout.row() - row.prop(input_tool_scene, "solanaMetaDataBool") - if bpy.context.scene.input_tool.solanaMetaDataBool: + row.prop(input_tool_scene, "solana_metadata_bool") + if bpy.context.scene.input_tool.solana_metadata_bool: row = layout.row() row.prop(input_tool_scene, "solana_description") @@ -844,8 +942,8 @@ class BMNFTS_PT_GenerateNFTs(bpy.types.Panel): icon='URL').url = "https://docs.metaplex.com/token-metadata/specification" row = layout.row() - row.prop(input_tool_scene, "erc721MetaData") - if bpy.context.scene.input_tool.erc721MetaData: + row.prop(input_tool_scene, "erc721_metadata") + if bpy.context.scene.input_tool.erc721_metadata: row = layout.row() row.prop(input_tool_scene, "erc721_description") @@ -854,10 +952,10 @@ class BMNFTS_PT_GenerateNFTs(bpy.types.Panel): icon='URL').url = "https://docs.opensea.io/docs/metadata-standards" row = layout.row() - row.prop(input_tool_scene, "enableCustomFields") + row.prop(input_tool_scene, "enable_custom_fields") # Custom Metadata Fields UIList: - if bpy.context.scene.input_tool.enableCustomFields: + if bpy.context.scene.input_tool.enable_custom_fields: layout = self.layout scn = bpy.context.scene @@ -881,14 +979,14 @@ class BMNFTS_PT_GenerateNFTs(bpy.types.Panel): row.operator("custom_metadata_fields_uilist.clear_list", icon="X") row = layout.row() - row.prop(input_tool_scene, "batchToGenerate") + row.prop(input_tool_scene, "batch_to_generate") save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path) Blend_My_NFTs_Output = os.path.join(save_path, "Blend_My_NFTs Output", "NFT_Data") batch_json_save_path = os.path.join(Blend_My_NFTs_Output, "Batch_Data") nftBatch_save_path = os.path.join(save_path, "Blend_My_NFTs Output", "Generated NFT Batches") - fail_state, failed_batch, failed_dna, failed_dna_index = Checks.check_FailedBatches(batch_json_save_path) + fail_state, failed_batch, failed_dna, 
failed_dna_index = helpers.check_failed_batches(batch_json_save_path) if fail_state: row = layout.row() @@ -946,16 +1044,16 @@ class BMNFTS_PT_Other(bpy.types.Panel): """ row = layout.row() - row.prop(input_tool_scene, "enableAutoSave") + row.prop(input_tool_scene, "enable_auto_save") # Auto Shutdown: row = layout.row() - row.prop(input_tool_scene, "enableAutoShutdown") + row.prop(input_tool_scene, "enable_auto_shutdown") row.label(text="*Must Run Blender as Admin") - if bpy.context.scene.input_tool.enableAutoShutdown: + if bpy.context.scene.input_tool.enable_auto_shutdown: row = layout.row() - row.prop(input_tool_scene, "specify_timeBool") + row.prop(input_tool_scene, "specify_time_bool") time_row1 = layout.row() time_row1.label(text=f"Hours") @@ -965,7 +1063,7 @@ class BMNFTS_PT_Other(bpy.types.Panel): time_row2.label(text=f"Minutes") time_row2.prop(input_tool_scene, "minutes", text="") - if not bpy.context.scene.input_tool.specify_timeBool: + if not bpy.context.scene.input_tool.specify_time_bool: time_row1.enabled = False time_row2.enabled = False else: @@ -974,10 +1072,10 @@ class BMNFTS_PT_Other(bpy.types.Panel): layout.separator() row = layout.row() - row.prop(input_tool_scene, "emailNotificationBool") + row.prop(input_tool_scene, "email_notification_bool") row.label(text="*Windows 10+ only") - if bpy.context.scene.input_tool.emailNotificationBool: + if bpy.context.scene.input_tool.email_notification_bool: row = layout.row() row.prop(input_tool_scene, "sender_from") row = layout.row() @@ -1001,6 +1099,11 @@ class BMNFTS_PT_Other(bpy.types.Panel): layout.label(text=f"**Set a Save Path in Create NFT Data to Export Settings") row = layout.row() + row.prop(input_tool_scene, "enable_debug") + if bpy.context.scene.input_tool.enable_debug: + row = layout.row() + row.prop(input_tool_scene, "log_path") + row = layout.row() row = layout.row() layout.label(text=f"Looking for help?") @@ -1010,8 +1113,12 @@ class BMNFTS_PT_Other(bpy.types.Panel): icon='URL').url = 
"https://github.com/torrinworx/Blend_My_NFTs" row = layout.row() - row.operator("wm.url_open", text="YouTube Tutorials", - icon='URL').url = "https://www.youtube.com/watch?v=ygKJYz4BjRs&list=PLuVvzaanutXcYtWmPVKu2bx83EYNxLRsX" + row.operator( + "wm.url_open", + text="YouTube Tutorials", + icon='URL' + ).url = "https://www.youtube.com/watch?v=ygKJYz4BjRs&list=PLuVvzaanutXcYtWmPVKu2bx83EYNxLRsX" + row = layout.row() row.operator("wm.url_open", text="Join Our Discord Community!", icon='URL').url = "https://discord.gg/UpZt5Un57t" @@ -1022,22 +1129,22 @@ class BMNFTS_PT_Other(bpy.types.Panel): # ======== Blender add-on register/unregister handling ======== # classes = ( - # Property Group Classes: - BMNFTS_PGT_Input_Properties, + # Property Group Classes: + BMNFTS_PGT_Input_Properties, - # Operator Classes: - createData, - exportNFTs, - resume_failed_batch, - refactor_Batches, - export_settings, + # Operator Classes: + CreateData, + ExportNFTs, + ResumeFailedBatch, + RefactorBatches, + ExportSettings, - # Panel Classes: - BMNFTS_PT_CreateData, - BMNFTS_PT_GenerateNFTs, - BMNFTS_PT_Refactor, - BMNFTS_PT_Other, - ) + Custom_Metadata_UIList.classes_Custom_Metadata_UIList + Logic_UIList.classes_Logic_UIList + # Panel Classes: + BMNFTS_PT_CreateData, + BMNFTS_PT_GenerateNFTs, + BMNFTS_PT_Refactor, + BMNFTS_PT_Other, +) + custom_metadata_ui_list.classes_Custom_Metadata_UIList + logic_ui_list.classes_Logic_UIList def register(): @@ -1047,10 +1154,10 @@ def register(): bpy.types.Scene.input_tool = bpy.props.PointerProperty(type=BMNFTS_PGT_Input_Properties) bpy.types.Scene.custom_metadata_fields = CollectionProperty( - type=Custom_Metadata_UIList.CUSTOM_custom_metadata_fields_objectCollection) + type=custom_metadata_ui_list.CUSTOM_custom_metadata_fields_objectCollection) bpy.types.Scene.custom_metadata_fields_index = IntProperty() - bpy.types.Scene.logic_fields = CollectionProperty(type=Logic_UIList.CUSTOM_logic_objectCollection) + bpy.types.Scene.logic_fields = 
CollectionProperty(type=logic_ui_list.CUSTOM_logic_objectCollection) bpy.types.Scene.logic_fields_index = IntProperty() @@ -1069,4 +1176,4 @@ def unregister(): if __name__ == '__main__': register() - runAsHeadless() + run_as_headless() diff --git a/main/Checks.py b/main/Checks.py deleted file mode 100644 index ab50e7c..0000000 --- a/main/Checks.py +++ /dev/null @@ -1,219 +0,0 @@ -# Purpose: -# The purpose of this file is to check the NFTRecord.json for duplicate NFT DNA and returns any found in the console. -# It also checks the percentage each variant is chosen in the NFTRecord, then compares it with its rarity percentage -# set in the .blend file. - -# This file is provided for transparency. The accuracy of the rarity values you set in your .blend file as outlined in -# the README.md file are dependent on the maxNFTs, and the maximum number of combinations of your NFT collection. - -import bpy -import os -import json -from collections import Counter, defaultdict - -from . import DNA_Generator, get_combinations -from .Constants import bcolors, removeList, remove_file_by_extension - - -# Checks: -def check_Scene(): # Not complete - """ - Checks if Blender file Scene follows the Blend_My_NFTs conventions. If not, raises error with all instances of - violations. 
- """ - - script_ignore_exists = None # True if Script_Ignore collection exists in Blender scene - attribute_naming_conventions = None # True if all attributes in Blender scene follow BMNFTs naming conventions - variant_naming_conventions = None # True if all variants in Blender scene follow BMNFTs naming conventions - object_placing_conventions = None # True if all objects are within either Script_Ignore or a variant collection - - # script_ignore_exists: - try: - scriptIgnoreCollection = bpy.data.collections["Script_Ignore"] - script_ignore_exists = True - except KeyError: - raise TypeError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"Add a Script_Ignore collection to your Blender scene and ensure the name is exactly 'Script_Ignore'. For more information, " - f"see:" - f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{bcolors.RESET}" - ) - - hierarchy = DNA_Generator.get_hierarchy() - collections = bpy.context.scene.collection - - # attribute_naming_conventions - -def check_Rarity(hierarchy, DNAListFormatted, save_path): - """Checks rarity percentage of each Variant, then sends it to RarityData.json in NFT_Data folder.""" - - DNAList = [] - for i in DNAListFormatted: - DNAList.append(list(i.keys())[0]) - - - numNFTsGenerated = len(DNAList) - - numDict = defaultdict(list) - - hierarchy.keys() - - for i in DNAList: - dnaSplitList = i.split("-") - - for j, k in zip(dnaSplitList, hierarchy.keys()): - numDict[k].append(j) - - numDict = dict(numDict) - - for i in numDict: - count = dict(Counter(numDict[i])) - numDict[i] = count - - fullNumName = {} - - for i in hierarchy: - fullNumName[i] = {} - for j in hierarchy[i]: - variantNum = hierarchy[i][j]["number"] - - fullNumName[i][variantNum] = j - - completeData = {} - - for i, j in zip(fullNumName, numDict): - x = {} - - for k in fullNumName[i]: - - for l in numDict[j]: - if l == k: - name = fullNumName[i][k] - num = numDict[j][l] - x[name] = 
[(str(round(((num/numNFTsGenerated)*100), 2)) + "%"), str(num)] - - completeData[i] = x - - print( - f"\n{bcolors.OK}\n" - f"Rarity Checker is active. These are the percentages for each variant per attribute you set in your .blend file:" - f"\n{bcolors.RESET}" - ) - - for i in completeData: - print(i + ":") - for j in completeData[i]: - print(" " + j + ": " + completeData[i][j][0] + " Occurrences: " + completeData[i][j][1]) - - jsonMetaData = json.dumps(completeData, indent=1, ensure_ascii=True) - - with open(os.path.join(save_path, "RarityData.json"), 'w') as outfile: - outfile.write(jsonMetaData + '\n') - path = os.path.join(save_path, "RarityData.json") - print(bcolors.OK + f"Rarity Data has been saved to {path}." + bcolors.RESET) - -def check_Duplicates(DNAListFormatted): - """Checks if there are duplicates in DNAList before NFTRecord.json is sent to JSON file.""" - DNAList = [] - for i in DNAListFormatted: - DNAList.append(list(i.keys())[0]) - - - duplicates = 0 - seen = set() - - for x in DNAList: - if x in seen: - print(x) - duplicates += 1 - seen.add(x) - - print(f"\nNFTRecord.json contains {duplicates} duplicate NFT DNA.") - -def check_FailedBatches(batch_json_save_path): - fail_state = False - failed_batch = None - failed_dna = None - failed_dna_index = None - - if os.path.isdir(batch_json_save_path): - batch_folders = remove_file_by_extension(os.listdir(batch_json_save_path)) - - for i in batch_folders: - batch = json.load(open(os.path.join(batch_json_save_path, i))) - NFTs_in_Batch = batch["NFTs_in_Batch"] - if "Generation Save" in batch: - dna_generated = batch["Generation Save"][-1]["DNA Generated"] - if dna_generated is not None and dna_generated < NFTs_in_Batch: - fail_state = True - failed_batch = int(i.removeprefix("Batch").removesuffix(".json")) - failed_dna = dna_generated - - return fail_state, failed_batch, failed_dna, failed_dna_index - -# Raise Errors: -def raise_Error_numBatches(maxNFTs, nftsPerBatch): - """Checks if number of Batches is 
less than maxNFTs, if not raises error.""" - - try: - numBatches = maxNFTs / nftsPerBatch - return numBatches - except ZeroDivisionError: - raise ZeroDivisionError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"The number of NFTs per Batch must be greater than ZERO." - f"Please review your Blender scene and ensure it follows " - f"the naming conventions and scene structure. For more information, " - f"see:\n{bcolors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{bcolors.RESET}" - ) - -def raise_Error_ZeroCombinations(): - """Checks if combinations is greater than 0, if so, raises error.""" - if get_combinations.get_combinations() == 0: - raise ValueError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"The number of all possible combinations is ZERO. Please review your Blender scene and ensure it follows " - f"the naming conventions and scene structure. For more information, " - f"see:\n{bcolors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{bcolors.RESET}" - ) - -def raise_Error_numBatchesGreaterThan(numBatches): - if numBatches < 1: - raise ValueError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"The number of Batches is less than 1. Please review your Blender scene and ensure it follows " - f"the naming conventions and scene structure. For more information, " - f"see:\n{bcolors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{bcolors.RESET}" - ) - -# Raise Warnings: - -def raise_Warning_maxNFTs(nftsPerBatch, collectionSize): - """ - Prints warning if nftsPerBatch is greater than collectionSize. 
- """ - - if nftsPerBatch > collectionSize: - raise ValueError( - f"\n{bcolors.WARNING}Blend_My_NFTs Warning:\n" - f"The number of NFTs Per Batch you set is smaller than the NFT Collection Size you set.\n{bcolors.RESET}" - ) - -def raise_Warning_collectionSize(DNAList, collectionSize): - """ - Prints warning if BMNFTs cannot generate requested number of NFTs from a given collectionSize. - """ - - if len(DNAList) < collectionSize: - print(f"\n{bcolors.WARNING} \nWARNING: \n" - f"Blend_My_NFTs cannot generate {collectionSize} NFTs." - f" Only {len(DNAList)} NFT DNA were generated." - - f"\nThis might be for a number of reasons:" - f"\n a) Rarity is preventing combinations from being generated (See https://github.com/torrinworx/Blend_My_NFTs#notes-on-rarity-and-weighted-variants).\n" - f"\n b) Logic is preventing combinations from being generated (See https://github.com/torrinworx/Blend_My_NFTs#logic).\n" - f"\n c) The number of possible combinations of your NFT collection is too low. Add more Variants or Attributes to increase the recommended collection size.\n" - f"\n{bcolors.RESET}") diff --git a/main/Constants.py b/main/Constants.py deleted file mode 100644 index 3cc94c3..0000000 --- a/main/Constants.py +++ /dev/null @@ -1,51 +0,0 @@ -# Purpose: -# This file is for storing or updating constant values that may need to be changes depending on system requirements and -# different usecases. -import os -import json -import platform - -removeList = [".gitignore", ".DS_Store", "desktop.ini", ".ini"] - -def remove_file_by_extension(dirlist): - """ - Checks if a given directory list contains any of the files or file extensions listed above, if so, remove them from - list and return a clean dir list. These files interfer with BMNFTs operations and should be removed whenever dealing - with directories. 
- """ - - if str(type(dirlist)) == "": - dirlist = list(dirlist) # converts single string path to list if dir pasted as string - - return_dirs = [] - for directory in dirlist: - if not str(os.path.split(directory)[1]) in removeList: - return_dirs.append(directory) - - return return_dirs - - -class bcolors: - """ - The colour of console messages. - """ - - OK = '\033[92m' # GREEN - WARNING = '\033[93m' # YELLOW - ERROR = '\033[91m' # RED - RESET = '\033[0m' # RESET COLOR - -def save_result(result): - """ - Saves json result to json file at the specified path. - """ - file_name = "log.json" - if platform.system() == "Linux" or platform.system() == "Darwin": - path = os.path.join(os.path.join(os.path.expanduser('~')), 'Desktop', file_name) - - if platform.system() == "Windows": - path = os.path.join(os.environ["HOMEPATH"], "Desktop", file_name) - - data = json.dumps(result, indent=1, ensure_ascii=True) - with open(path, 'w') as outfile: - outfile.write(data + '\n') diff --git a/main/DNA_Generator.py b/main/DNA_Generator.py deleted file mode 100644 index d045e2f..0000000 --- a/main/DNA_Generator.py +++ /dev/null @@ -1,368 +0,0 @@ -# Purpose: -# This file generates NFT DNA based on a .blend file scene structure and exports NFTRecord.json. - -import bpy -import os -import re -import copy -import time -import json -import random -from functools import partial -from .loading_animation import Loader -from . import Rarity, Logic, Checks, Material_Generator -from .Constants import bcolors, removeList, remove_file_by_extension - - -def get_hierarchy(): - """ - Returns the hierarchy of a given Blender scene. 
- """ - - coll = bpy.context.scene.collection - - scriptIgnoreCollection = bpy.data.collections["Script_Ignore"] - - listAllCollInScene = [] - listAllCollections = [] - - def traverse_tree(t): - yield t - for child in t.children: - yield from traverse_tree(child) - - for c in traverse_tree(coll): - listAllCollInScene.append(c) - - for i in listAllCollInScene: - listAllCollections.append(i.name) - - listAllCollections.remove(scriptIgnoreCollection.name) - - if "Scene Collection" in listAllCollections: - listAllCollections.remove("Scene Collection") - - if "Master Collection" in listAllCollections: - listAllCollections.remove("Master Collection") - - def allScriptIgnore(scriptIgnoreCollection): - # Removes all collections, sub collections in Script_Ignore collection from listAllCollections. - - for coll in list(scriptIgnoreCollection.children): - listAllCollections.remove(coll.name) - listColl = list(coll.children) - if len(listColl) > 0: - allScriptIgnore(coll) - - allScriptIgnore(scriptIgnoreCollection) - listAllCollections.sort() - - exclude = ["_"] # Excluding characters that identify a Variant - attributeCollections = copy.deepcopy(listAllCollections) - - def filter_num(): - """ - This function removes items from 'attributeCollections' if they include values from the 'exclude' variable. - It removes child collections from the parent collections in from the "listAllCollections" list. 
- """ - for x in attributeCollections: - if any(a in x for a in exclude): - attributeCollections.remove(x) - - for i in range(len(listAllCollections)): - filter_num() - - attributeVariants = [x for x in listAllCollections if x not in attributeCollections] - attributeCollections1 = copy.deepcopy(attributeCollections) - - def attributeData(attributeVariants): - """ - Creates a dictionary of each attribute - """ - allAttDataList = {} - for i in attributeVariants: - # Check if name follows naming conventions: - if int(i.count("_")) > 2 and int(i.split("_")[1]) > 0: - raise Exception( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"There is a naming issue with the following Attribute/Variant: '{i}'\n" - f"Review the naming convention of Attribute and Variant collections here:\n{bcolors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" - ) - - try: - number = i.split("_")[1] - name = i.split("_")[0] - rarity = i.split("_")[2] - except IndexError: - raise Exception( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"There is a naming issue with the following Attribute/Variant: '{i}'\n" - f"Review the naming convention of Attribute and Variant collections here:\n{bcolors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" - ) - - allAttDataList[i] = {"name": name, "number": number, "rarity": rarity} - return allAttDataList - - variantMetaData = attributeData(attributeVariants) - - hierarchy = {} - for i in attributeCollections1: - colParLong = list(bpy.data.collections[str(i)].children) - colParShort = {} - for x in colParLong: - colParShort[x.name] = None - hierarchy[i] = colParShort - - for a in hierarchy: - for b in hierarchy[a]: - for x in variantMetaData: - if str(x) == str(b): - (hierarchy[a])[b] = variantMetaData[x] - - return hierarchy - - -def generateNFT_DNA(collectionSize, enableRarity, enableLogic, logicFile, enableMaterials, materialsFile): - """ - Returns 
batchDataDictionary containing the number of NFT combinations, hierarchy, and the DNAList. - """ - - hierarchy = get_hierarchy() - - # DNA random, Rarity and Logic methods: - DataDictionary = {} - - def createDNArandom(hierarchy): - """Creates a single DNA randomly without Rarity or Logic.""" - dnaStr = "" - dnaStrList = [] - listOptionVariant = [] - - for i in hierarchy: - numChild = len(hierarchy[i]) - possibleNums = list(range(1, numChild + 1)) - listOptionVariant.append(possibleNums) - - for i in listOptionVariant: - randomVariantNum = random.choices(i, k=1) - str1 = ''.join(str(e) for e in randomVariantNum) - dnaStrList.append(str1) - - for i in dnaStrList: - num = "-" + str(i) - dnaStr += num - - dna = ''.join(dnaStr.split('-', 1)) - - return str(dna) - - def singleCompleteDNA(): - """ - This function applies Rarity and Logic to a single DNA created by createDNASingle() if Rarity or Logic specified - """ - - singleDNA = "" - # Comments for debugging random, rarity, logic, and materials. - if not enableRarity: - singleDNA = createDNArandom(hierarchy) - # print("============") - # print(f"Original DNA: {singleDNA}") - if enableRarity: - singleDNA = Rarity.createDNArarity(hierarchy) - # print(f"Rarity DNA: {singleDNA}") - - if enableLogic: - singleDNA = Logic.logicafyDNAsingle(hierarchy, singleDNA, logicFile, enableRarity, enableMaterials) - # print(f"Logic DNA: {singleDNA}") - - if enableMaterials: - singleDNA = Material_Generator.apply_materials(hierarchy, singleDNA, materialsFile, enableRarity) - # print(f"Materials DNA: {singleDNA}") - - # print("============\n") - - return singleDNA - - def create_DNAList(): - """Creates DNAList. 
Loops through createDNARandom() and applies Rarity, and Logic while checking if all DNA are unique""" - DNASetReturn = set() - - for i in range(collectionSize): - dnaPushToList = partial(singleCompleteDNA) - - DNASetReturn |= {''.join([dnaPushToList()]) for _ in range(collectionSize - len(DNASetReturn))} - - DNAListUnformatted = list(DNASetReturn) - - DNAListFormatted = [] - DNA_Counter = 1 - for i in DNAListUnformatted: - DNAListFormatted.append({ - i: { - "Complete": False, - "Order_Num": DNA_Counter - } - }) - - DNA_Counter += 1 - - return DNAListFormatted - - DNAList = create_DNAList() - - # Messages: - - Checks.raise_Warning_collectionSize(DNAList, collectionSize) - - # Data stored in batchDataDictionary: - DataDictionary["numNFTsGenerated"] = len(DNAList) - DataDictionary["hierarchy"] = hierarchy - DataDictionary["DNAList"] = DNAList - - return DataDictionary - - -def makeBatches(collectionSize, nftsPerBatch, save_path, batch_json_save_path): - """ - Sorts through all the batches and outputs a given number of batches depending on collectionSize and nftsPerBatch. 
- These files are then saved as Batch#.json files to batch_json_save_path - """ - - # Clears the Batch Data folder of Batches: - batchList = os.listdir(batch_json_save_path) - if batchList: - for i in batchList: - batch = os.path.join(batch_json_save_path, i) - if os.path.exists(batch): - os.remove( - os.path.join(batch_json_save_path, i) - ) - - Blend_My_NFTs_Output = os.path.join(save_path, "Blend_My_NFTs Output", "NFT_Data") - NFTRecord_save_path = os.path.join(Blend_My_NFTs_Output, "NFTRecord.json") - DataDictionary = json.load(open(NFTRecord_save_path)) - - numNFTsGenerated = DataDictionary["numNFTsGenerated"] - hierarchy = DataDictionary["hierarchy"] - DNAList = DataDictionary["DNAList"] - - numBatches = collectionSize // nftsPerBatch - remainder_dna = collectionSize % nftsPerBatch - if remainder_dna > 0: - numBatches += 1 - - print(f"To generate batches of {nftsPerBatch} DNA sequences per batch, with a total of {numNFTsGenerated}" - f" possible NFT DNA sequences, the number of batches generated will be {numBatches}") - - batches_dna_list = [] - - for i in range(numBatches): - BatchDNAList = [] - if i != range(numBatches)[-1]: - BatchDNAList = list(DNAList[0:nftsPerBatch]) - batches_dna_list.append(BatchDNAList) - - DNAList = [x for x in DNAList if x not in BatchDNAList] - else: - BatchDNAList = DNAList - - batchDictionary = { - "NFTs_in_Batch": int(len(BatchDNAList)), - "hierarchy": hierarchy, - "BatchDNAList": BatchDNAList - } - - batchDictionary = json.dumps(batchDictionary, indent=1, ensure_ascii=True) - - with open(os.path.join(batch_json_save_path, f"Batch{i + 1}.json"), "w") as outfile: - outfile.write(batchDictionary) - - -def send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, enableLogic, logicFile, enableMaterials, - materialsFile, Blend_My_NFTs_Output, batch_json_save_path): - """ - Creates NFTRecord.json file and sends "batchDataDictionary" to it. 
NFTRecord.json is a permanent record of all DNA - you've generated with all attribute variants. If you add new variants or attributes to your .blend file, other scripts - need to reference this .json file to generate new DNA and make note of the new attributes and variants to prevent - repeate DNA. - """ - - # Checking Scene is compatible with BMNFTs: - Checks.check_Scene() - - # Messages: - print( - f"\n========================================\n" - f"Creating NFT Data. Generating {collectionSize} NFT DNA.\n" - ) - - if not enableRarity and not enableLogic: - print( - f"{bcolors.OK}NFT DNA will be determined randomly, no special properties or parameters are applied.\n{bcolors.RESET}") - - if enableRarity: - print(f"{bcolors.OK}Rarity is ON. Weights listed in .blend scene will be taken into account.\n{bcolors.RESET}") - - if enableLogic: - print(f"{bcolors.OK}Logic is ON. {len(list(logicFile.keys()))} rules detected and applied.\n{bcolors.RESET}") - - time_start = time.time() - - def create_nft_data(): - try: - DataDictionary = generateNFT_DNA(collectionSize, enableRarity, enableLogic, logicFile, enableMaterials, - materialsFile) - NFTRecord_save_path = os.path.join(Blend_My_NFTs_Output, "NFTRecord.json") - - # Checks: - - Checks.raise_Warning_maxNFTs(nftsPerBatch, collectionSize) - Checks.check_Duplicates(DataDictionary["DNAList"]) - Checks.raise_Error_ZeroCombinations() - - if enableRarity: - Checks.check_Rarity(DataDictionary["hierarchy"], DataDictionary["DNAList"], - os.path.join(save_path, "Blend_My_NFTs Output/NFT_Data")) - - except FileNotFoundError: - raise FileNotFoundError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows " - f"the naming conventions and scene structure. 
For more information, " - f"see:\n{bcolors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" - ) - finally: - loading.stop() - - try: - ledger = json.dumps(DataDictionary, indent=1, ensure_ascii=True) - with open(NFTRecord_save_path, 'w') as outfile: - outfile.write(ledger + '\n') - - print( - f"\n{bcolors.OK}Blend_My_NFTs Success:\n" - f"{len(DataDictionary['DNAList'])} NFT DNA saved to {NFTRecord_save_path}. NFT DNA Successfully created.\n{bcolors.RESET}") - - except: - raise ( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows " - f"the naming conventions and scene structure. For more information, " - f"see:\n{bcolors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" - ) - - # Loading Animation: - loading = Loader(f'Creating NFT DNA...', '').start() - create_nft_data() - makeBatches(collectionSize, nftsPerBatch, save_path, batch_json_save_path) - loading.stop() - - time_end = time.time() - - print( - f"{bcolors.OK}Created and saved NFT DNA in {time_end - time_start}s.\n{bcolors.RESET}" - ) diff --git a/main/Exporter.py b/main/Exporter.py deleted file mode 100644 index 76a67cb..0000000 --- a/main/Exporter.py +++ /dev/null @@ -1,580 +0,0 @@ -# Purpose: -# This file takes a given Batch created by DNA_Generator.py and tells blender to render the image or export a 3D model to -# the NFT_Output folder. 
- -import bpy -import os -import ssl -import time -import json -import smtplib -import datetime -import platform -from .loading_animation import Loader -from .Constants import bcolors, removeList, remove_file_by_extension -from .Metadata import createCardanoMetadata, createSolanaMetaData, createErc721MetaData - - -# Save info -def save_batch(batch, file_name): - saved_batch = json.dumps(batch, indent=1, ensure_ascii=True) - - with open(os.path.join(file_name), 'w') as outfile: - outfile.write(saved_batch + '\n') - - -def save_generation_state(input): - """Saves date and time of generation start, and generation types; Images, Animations, 3D Models, and the file types for each.""" - file_name = os.path.join(input.batch_json_save_path, "Batch{}.json".format(input.batchToGenerate)) - batch = json.load(open(file_name)) - - CURRENT_TIME = datetime.datetime.now().strftime("%H:%M:%S") - CURRENT_DATE = datetime.datetime.now().strftime("%d/%m/%Y") - LOCAL_TIMEZONE = str(datetime.datetime.now(datetime.timezone.utc)) - - if "Generation Save" in batch: - batch_save_number = int(batch[f"Generation Save"].index(batch[f"Generation Save"][-1])) - else: - batch_save_number = 0 - - batch["Generation Save"] = list() - batch["Generation Save"].append({ - "Batch Save Number": batch_save_number + 1, - "DNA Generated": None, - "Generation Start Date and Time": [CURRENT_TIME, CURRENT_DATE, LOCAL_TIMEZONE], - "Render_Settings": { - "nftName": input.nftName, - "save_path": input.save_path, - "nftsPerBatch": input.nftsPerBatch, - "batchToGenerate": input.batchToGenerate, - "collectionSize": input.collectionSize, - - "Blend_My_NFTs_Output": input.Blend_My_NFTs_Output, - "batch_json_save_path": input.batch_json_save_path, - "nftBatch_save_path": input.nftBatch_save_path, - - "enableImages": input.enableImages, - "imageFileFormat": input.imageFileFormat, - - "enableAnimations": input.enableAnimations, - "animationFileFormat": input.animationFileFormat, - - "enableModelsBlender": 
input.enableModelsBlender, - "modelFileFormat": input.modelFileFormat, - - "enableCustomFields": input.enableCustomFields, - - "cardanoMetaDataBool": input.cardanoMetaDataBool, - "solanaMetaDataBool": input.solanaMetaDataBool, - "erc721MetaData": input.erc721MetaData, - - "cardano_description": input.cardano_description, - "solana_description": input.solana_description, - "erc721_description": input.erc721_description, - - "enableMaterials": input.enableMaterials, - "materialsFile": input.materialsFile, - - "enableLogic": input.enableLogic, - "enable_Logic_Json": input.enable_Logic_Json, - "logicFile": input.logicFile, - - "enableRarity": input.enableRarity, - - "enableAutoShutdown": input.enableAutoShutdown, - - "specify_timeBool": input.specify_timeBool, - "hours": input.hours, - "minutes": input.minutes, - - "emailNotificationBool": input.emailNotificationBool, - "sender_from": input.sender_from, - "email_password": input.email_password, - "receiver_to": input.receiver_to, - - "custom_Fields": input.custom_Fields, - }, - }) - - save_batch(batch, file_name) - - -def save_completed(full_single_dna, a, x, batch_json_save_path, batchToGenerate): - """Saves progress of rendering to batch.json file.""" - - file_name = os.path.join(batch_json_save_path, "Batch{}.json".format(batchToGenerate)) - batch = json.load(open(file_name)) - index = batch["BatchDNAList"].index(a) - batch["BatchDNAList"][index][full_single_dna]["Complete"] = True - batch["Generation Save"][-1]["DNA Generated"] = x - - save_batch(batch, file_name) - - -# Exporter functions: -def getBatchData(batchToGenerate, batch_json_save_path): - """ - Retrieves a given batches data determined by renderBatch in config.py - """ - - file_name = os.path.join(batch_json_save_path, "Batch{}.json".format(batchToGenerate)) - batch = json.load(open(file_name)) - - NFTs_in_Batch = batch["NFTs_in_Batch"] - hierarchy = batch["hierarchy"] - BatchDNAList = batch["BatchDNAList"] - - return NFTs_in_Batch, hierarchy, 
BatchDNAList - - -def render_and_save_NFTs(input): - """ - Renders the NFT DNA in a Batch#.json, where # is renderBatch in config.py. Turns off the viewport camera and - the render camera for all items in hierarchy. - """ - - time_start_1 = time.time() - - # If failed Batch is detected and user is resuming its generation: - if input.fail_state: - print(f"{bcolors.ERROR}\nResuming Failed Batch {input.failed_batch}\n{bcolors.RESET}") - NFTs_in_Batch, hierarchy, BatchDNAList = getBatchData(input.failed_batch, input.batch_json_save_path) - for a in range(input.failed_dna): - del BatchDNAList[0] - x = input.failed_dna + 1 - - # If user is generating the normal way: - else: - print(f"\nGenerating Batch {input.batchToGenerate}\n") - NFTs_in_Batch, hierarchy, BatchDNAList = getBatchData(input.batchToGenerate, input.batch_json_save_path) - save_generation_state(input) - x = 1 - - if input.enableMaterials: - materialsFile = json.load(open(input.materialsFile)) - - for a in BatchDNAList: - full_single_dna = list(a.keys())[0] - Order_Num = a[full_single_dna]['Order_Num'] - - # Material handling: - if input.enableMaterials: - single_dna, material_dna = full_single_dna.split(':') - - if not input.enableMaterials: - single_dna = full_single_dna - - def match_DNA_to_Variant(single_dna): - """ - Matches each DNA number separated by "-" to its attribute, then its variant. - """ - - listAttributes = list(hierarchy.keys()) - listDnaDecunstructed = single_dna.split('-') - dnaDictionary = {} - - for i, j in zip(listAttributes, listDnaDecunstructed): - dnaDictionary[i] = j - - for x in dnaDictionary: - for k in hierarchy[x]: - kNum = hierarchy[x][k]["number"] - if kNum == dnaDictionary[x]: - dnaDictionary.update({x: k}) - return dnaDictionary - - def match_materialDNA_to_Material(single_dna, material_dna, materialsFile): - """ - Matches the Material DNA to it's selected Materials unless a 0 is present meaning no material for that variant was selected. 
- """ - listAttributes = list(hierarchy.keys()) - listDnaDecunstructed = single_dna.split('-') - listMaterialDNADeconstructed = material_dna.split('-') - - full_dna_dict = {} - - for attribute, variant, material in zip(listAttributes, listDnaDecunstructed, listMaterialDNADeconstructed): - - for var in hierarchy[attribute]: - if hierarchy[attribute][var]['number'] == variant: - variant = var - - if material != '0': # If material is not empty - for variant_m in materialsFile: - if variant == variant_m: - # Getting Materials name from Materials index in the Materials List - materials_list = list(materialsFile[variant_m]["Material List"].keys()) - - material = materials_list[int(material) - 1] # Subtract 1 because '0' means empty mat - break - - full_dna_dict[variant] = material - - return full_dna_dict - - metadataMaterialDict = {} - - if input.enableMaterials: - materialdnaDictionary = match_materialDNA_to_Material(single_dna, material_dna, materialsFile) - - for var_mat in list(materialdnaDictionary.keys()): - if materialdnaDictionary[var_mat] != '0': - if not materialsFile[var_mat]['Variant Objects']: - """ - If objects to apply material to not specified, apply to all objects in Variant collection. - """ - metadataMaterialDict[var_mat] = materialdnaDictionary[var_mat] - - for obj in bpy.data.collections[var_mat].all_objects: - selected_object = bpy.data.objects.get(obj.name) - selected_object.active_material = bpy.data.materials[materialdnaDictionary[var_mat]] - - if materialsFile[var_mat]['Variant Objects']: - """ - If objects to apply material to are specified, apply material only to objects specified withing the Variant collection. 
- """ - metadataMaterialDict[var_mat] = materialdnaDictionary[var_mat] - - for obj in materialsFile[var_mat]['Variant Objects']: - selected_object = bpy.data.objects.get(obj) - selected_object.active_material = bpy.data.materials[materialdnaDictionary[var_mat]] - - # Turn off render camera and viewport camera for all collections in hierarchy - for i in hierarchy: - for j in hierarchy[i]: - try: - bpy.data.collections[j].hide_render = True - bpy.data.collections[j].hide_viewport = True - except KeyError: - raise TypeError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"The Collection '{j}' appears to be missing or has been renamed. If you made any changes to " - f"your .blned file scene, ensure you re-create your NFT Data so Blend_My_NFTs can read your scene." - f"For more information see:{bcolors.RESET}" - f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" - ) - - dnaDictionary = match_DNA_to_Variant(single_dna) - name = input.nftName + "_" + str(Order_Num) - - # Change Text Object in Scene to match DNA string: - # Variables that can be used: full_single_dna, name, Order_Num - # ob = bpy.data.objects['Text'] # Object name - # ob.data.body = str(f"DNA: {full_single_dna}") # Set text of Text Object ob - - print(f"\n{bcolors.OK}|--- Generating NFT {x}/{NFTs_in_Batch}: {name} ---|{bcolors.RESET}") - print(f"DNA attribute list:\n{dnaDictionary}\nDNA Code:{single_dna}") - - for c in dnaDictionary: - collection = dnaDictionary[c] - if collection != '0': - bpy.data.collections[collection].hide_render = False - bpy.data.collections[collection].hide_viewport = False - - time_start_2 = time.time() - - # Main paths for batch subfolders: - batchFolder = os.path.join(input.nftBatch_save_path, "Batch" + str(input.batchToGenerate)) - - imageFolder = os.path.join(batchFolder, "Images") - animationFolder = os.path.join(batchFolder, "Animations") - modelFolder = os.path.join(batchFolder, "Models") - BMNFT_metaData_Folder = 
os.path.join(batchFolder, "BMNFT_metadata") - - imagePath = os.path.join(imageFolder, name) - animationPath = os.path.join(animationFolder, name) - modelPath = os.path.join(modelFolder, name) - - cardanoMetadataPath = os.path.join(batchFolder, "Cardano_metadata") - solanaMetadataPath = os.path.join(batchFolder, "Solana_metadata") - erc721MetadataPath = os.path.join(batchFolder, "Erc721_metadata") - - - def check_failed_exists(file_path): - # Delete a file if a fail state is detected and if the file being re-generated already exists. Prevents - # animations from corrupting. - - if input.fail_state: - if os.path.exists(file_path): - os.remove(file_path) - - # Generation/Rendering: - if input.enableImages: - - print(f"{bcolors.OK}---Image---{bcolors.RESET}") - - image_render_time_start = time.time() - - check_failed_exists(imagePath) - - def render_image(): - if not os.path.exists(imageFolder): - os.makedirs(imageFolder) - - bpy.context.scene.render.filepath = imagePath - bpy.context.scene.render.image_settings.file_format = input.imageFileFormat - bpy.ops.render.render(write_still=True) - - # Loading Animation: - loading = Loader(f'Rendering Image {x}/{NFTs_in_Batch}...', '').start() - render_image() - loading.stop() - - image_render_time_end = time.time() - - print( - f"{bcolors.OK}Rendered image in {image_render_time_end - image_render_time_start}s.\n{bcolors.RESET}" - ) - - if input.enableAnimations: - print(f"{bcolors.OK}---Animation---{bcolors.RESET}") - - animation_render_time_start = time.time() - - check_failed_exists(animationPath) - - def render_animation(): - if not os.path.exists(animationFolder): - os.makedirs(animationFolder) - - if input.animationFileFormat == "MP4": - bpy.context.scene.render.filepath = animationPath - bpy.context.scene.render.image_settings.file_format = "FFMPEG" - - bpy.context.scene.render.ffmpeg.format = 'MPEG4' - bpy.context.scene.render.ffmpeg.codec = 'H264' - bpy.ops.render.render(animation=True) - - elif 
input.animationFileFormat == 'PNG': - if not os.path.exists(animationPath): - os.makedirs(animationPath) - - bpy.context.scene.render.filepath = os.path.join(animationPath, name) - bpy.context.scene.render.image_settings.file_format = input.animationFileFormat - bpy.ops.render.render(animation=True) - - elif input.animationFileFormat == 'TIFF': - if not os.path.exists(animationPath): - os.makedirs(animationPath) - - bpy.context.scene.render.filepath = os.path.join(animationPath, name) - bpy.context.scene.render.image_settings.file_format = input.animationFileFormat - bpy.ops.render.render(animation=True) - - else: - bpy.context.scene.render.filepath = animationPath - bpy.context.scene.render.image_settings.file_format = input.animationFileFormat - bpy.ops.render.render(animation=True) - - # Loading Animation: - loading = Loader(f'Rendering Animation {x}/{NFTs_in_Batch}...', '').start() - render_animation() - loading.stop() - - animation_render_time_end = time.time() - - print( - f"{bcolors.OK}Rendered animation in {animation_render_time_end - animation_render_time_start}s.\n{bcolors.RESET}" - ) - - if input.enableModelsBlender: - print(f"{bcolors.OK}---3D Model---{bcolors.RESET}") - - model_generation_time_start = time.time() - - def generate_models(): - if not os.path.exists(modelFolder): - os.makedirs(modelFolder) - - for i in dnaDictionary: - coll = dnaDictionary[i] - if coll != '0': - for obj in bpy.data.collections[coll].all_objects: - obj.select_set(True) - - for obj in bpy.data.collections['Script_Ignore'].all_objects: - obj.select_set(True) - - # Remove objects from 3D model export: - # remove_objects: list = [ - # ] - # - # for obj in bpy.data.objects: - # if obj.name in remove_objects: - # obj.select_set(False) - - if input.modelFileFormat == 'GLB': - check_failed_exists(f"{modelPath}.glb") - bpy.ops.export_scene.gltf(filepath=f"{modelPath}.glb", - check_existing=True, - export_format='GLB', - export_keep_originals=True, - use_selection=True) - if 
input.modelFileFormat == 'GLTF_SEPARATE': - check_failed_exists(f"{modelPath}.gltf") - check_failed_exists(f"{modelPath}.bin") - bpy.ops.export_scene.gltf(filepath=f"{modelPath}", - check_existing=True, - export_format='GLTF_SEPARATE', - export_keep_originals=True, - use_selection=True) - if input.modelFileFormat == 'GLTF_EMBEDDED': - check_failed_exists(f"{modelPath}.gltf") - bpy.ops.export_scene.gltf(filepath=f"{modelPath}.gltf", - check_existing=True, - export_format='GLTF_EMBEDDED', - export_keep_originals=True, - use_selection=True) - elif input.modelFileFormat == 'FBX': - check_failed_exists(f"{modelPath}.fbx") - bpy.ops.export_scene.fbx(filepath=f"{modelPath}.fbx", - check_existing=True, - use_selection=True) - elif input.modelFileFormat == 'OBJ': - check_failed_exists(f"{modelPath}.obj") - bpy.ops.export_scene.obj(filepath=f"{modelPath}.obj", - check_existing=True, - use_selection=True, ) - elif input.modelFileFormat == 'X3D': - check_failed_exists(f"{modelPath}.x3d") - bpy.ops.export_scene.x3d(filepath=f"{modelPath}.x3d", - check_existing=True, - use_selection=True) - elif input.modelFileFormat == 'STL': - check_failed_exists(f"{modelPath}.stl") - bpy.ops.export_mesh.stl(filepath=f"{modelPath}.stl", - check_existing=True, - use_selection=True) - elif input.modelFileFormat == 'VOX': - check_failed_exists(f"{modelPath}.vox") - bpy.ops.export_vox.some_data(filepath=f"{modelPath}.vox") - - # Loading Animation: - loading = Loader(f'Generating 3D model {x}/{NFTs_in_Batch}...', '').start() - generate_models() - loading.stop() - - model_generation_time_end = time.time() - - print( - f"{bcolors.OK}Generated 3D model in {model_generation_time_end - model_generation_time_start}s.\n{bcolors.RESET}" - ) - - # Generating Metadata: - if input.cardanoMetaDataBool: - if not os.path.exists(cardanoMetadataPath): - os.makedirs(cardanoMetadataPath) - createCardanoMetadata(name, Order_Num, full_single_dna, dnaDictionary, metadataMaterialDict, - input.custom_Fields, - 
input.enableCustomFields, input.cardano_description, cardanoMetadataPath) - - if input.solanaMetaDataBool: - if not os.path.exists(solanaMetadataPath): - os.makedirs(solanaMetadataPath) - createSolanaMetaData(name, Order_Num, full_single_dna, dnaDictionary, metadataMaterialDict, - input.custom_Fields, - input.enableCustomFields, input.solana_description, solanaMetadataPath) - - if input.erc721MetaData: - if not os.path.exists(erc721MetadataPath): - os.makedirs(erc721MetadataPath) - createErc721MetaData(name, Order_Num, full_single_dna, dnaDictionary, metadataMaterialDict, - input.custom_Fields, - input.enableCustomFields, input.erc721_description, erc721MetadataPath) - - if not os.path.exists(BMNFT_metaData_Folder): - os.makedirs(BMNFT_metaData_Folder) - - for b in dnaDictionary: - if dnaDictionary[b] == "0": - dnaDictionary[b] = "Empty" - - metaDataDict = {"name": name, "NFT_DNA": a, "NFT_Variants": dnaDictionary, - "Material_Attributes": metadataMaterialDict} - - jsonMetaData = json.dumps(metaDataDict, indent=1, ensure_ascii=True) - - with open(os.path.join(BMNFT_metaData_Folder, "Data_" + name + ".json"), 'w') as outfile: - outfile.write(jsonMetaData + '\n') - - print(f"Completed {name} render in {time.time() - time_start_2}s") - - save_completed(full_single_dna, a, x, input.batch_json_save_path, input.batchToGenerate) - - x += 1 - - for i in hierarchy: - for j in hierarchy[i]: - bpy.data.collections[j].hide_render = False - bpy.data.collections[j].hide_viewport = False - - batch_complete_time = time.time() - time_start_1 - - print(f"\nAll NFTs successfully generated and sent to {input.nftBatch_save_path}" - f"\nCompleted all renders in Batch{input.batchToGenerate}.json in {batch_complete_time}s\n") - - batch_info = {"Batch Render Time": batch_complete_time, "Number of NFTs generated in Batch": x - 1, - "Average time per generation": batch_complete_time / x - 1} - - batch_infoFolder = os.path.join(input.nftBatch_save_path, "Batch" + str(input.batchToGenerate), 
"batch_info.json") - save_batch(batch_info, batch_infoFolder) - - # Send Email that Batch is complete: - if input.emailNotificationBool: - port = 465 # For SSL - smtp_server = "smtp.gmail.com" - sender_email = input.sender_from # Enter your address - receiver_email = input.receiver_to # Enter receiver address - password = input.email_password - - # Get batch info for message: - if input.fail_state: - batch = input.fail_state - batchData = getBatchData(input.failed_batch, input.batch_json_save_path) - - else: - batchData = getBatchData(input.batchToGenerate, input.batch_json_save_path) - - batch = input.batchToGenerate - - generation_time = str(datetime.timedelta(seconds=batch_complete_time)) - - message = f"""\ - Subject: Batch {batch} completed {x - 1} NFTs in {generation_time} (h:m:s) - - Generation Time: - {generation_time.split(':')[0]} Hours, {generation_time.split(':')[1]} Minutes, {generation_time.split(':')[2]} Seconds - Batch Data: - - {batchData} - - This message was sent from an instance of the Blend_My_NFTs Blender add-on. 
- """ - - context = ssl.create_default_context() - with smtplib.SMTP_SSL(smtp_server, port, context=context) as server: - server.login(sender_email, password) - server.sendmail(sender_email, receiver_email, message) - - # Automatic Shutdown: - # If user selects automatic shutdown but did not specify time after Batch completion - def shutdown(time): - plateform = platform.system() - - if plateform == "Windows": - os.system(f"shutdown /s /t {time}") - if plateform == "Darwin": - os.system(f"shutdown /s /t {time}") - - if input.enableAutoShutdown and not input.specify_timeBool: - shutdown(0) - - # If user selects automatic shutdown and specify time after Batch completion - if input.enableAutoShutdown and input.specify_timeBool: - hours = (int(input.hours) / 60) / 60 - minutes = int(input.minutes) / 60 - total_sleep_time = hours + minutes - - # time.sleep(total_sleep_time) - - shutdown(total_sleep_time) diff --git a/main/Intermediate.py b/main/Intermediate.py deleted file mode 100644 index a5479e1..0000000 --- a/main/Intermediate.py +++ /dev/null @@ -1,79 +0,0 @@ -import json -import bpy - -from main import DNA_Generator, Exporter - - -def send_To_Record_JSON(input, reverse_order=False): - if input.enableLogic: - if input.enable_Logic_Json and input.logicFile: - input.logicFile = json.load(open(input.logicFile)) - - if input.enable_Logic_Json and not input.logicFile: - print({'ERROR'}, f"No Logic.json file path set. 
Please set the file path to your Logic.json file.") - - if not input.enable_Logic_Json: - scn = bpy.context.scene - if reverse_order: - input.logicFile = {} - num = 1 - for i in range(scn.logic_fields_index, -1, -1): - item = scn.logic_fields[i] - - item_list1 = item.item_list1 - rule_type = item.rule_type - item_list2 = item.item_list2 - input.logicFile[f"Rule-{num}"] = { - "IF": item_list1.split(','), - rule_type: item_list2.split(',') - } - print(rule_type) - num += 1 - else: - input.logicFile = {} - num = 1 - for item in scn.logic_fields: - item_list1 = item.item_list1 - rule_type = item.rule_type - item_list2 = item.item_list2 - input.logicFile[f"Rule-{num}"] = { - "IF": item_list1.split(','), - rule_type: item_list2.split(',') - } - print(rule_type) - - num += 1 - - DNA_Generator.send_To_Record_JSON(input.collectionSize, - input.nftsPerBatch, - input.save_path, - input.enableRarity, - input.enableLogic, - input.logicFile, - input.enableMaterials, - input.materialsFile, - input.Blend_My_NFTs_Output, - input.batch_json_save_path - ) - - -def render_and_save_NFTs(input, reverse_order=False): - if input.enableCustomFields: - scn = bpy.context.scene - if reverse_order: - for i in range(scn.custom_metadata_fields_index, -1, -1): - item = scn.custom_metadata_fields[i] - if item.field_name in list(input.custom_Fields.keys()): - raise ValueError( - f"A duplicate of '{item.field_name}' was found. Please ensure all Custom Metadata field Names are unique.") - else: - input.custom_Fields[item.field_name] = item.field_value - else: - for item in scn.custom_metadata_fields: - if item.field_name in list(input.custom_Fields.keys()): - raise ValueError( - f"A duplicate of '{item.field_name}' was found. 
Please ensure all Custom Metadata field Names are unique.") - else: - input.custom_Fields[item.field_name] = item.field_value - - Exporter.render_and_save_NFTs(input) diff --git a/main/Material_Generator.py b/main/Material_Generator.py deleted file mode 100644 index c5866ad..0000000 --- a/main/Material_Generator.py +++ /dev/null @@ -1,134 +0,0 @@ -# Purpose: -# The purpose of this file is to apply the materials a user sets in a given .json file to the Variant collection objects -# also specified in the .json file. The Materialized DNA is then returned in the following format: 1-1-1:1-1-1 -# Where the numbers right of the ":" are the material numbers applied to the respective Variants to the left of the ":" - -import bpy - -import json -import random -from .Constants import bcolors, removeList, remove_file_by_extension, save_result - - -def select_material(materialList, variant, enableRarity): - """Selects a material from a passed material list. """ - material_List_Of_i = [] # List of Material names instead of order numbers - rarity_List_Of_i = [] - ifZeroBool = None - - for material in materialList: - # Material Order Number comes from index in the Material List in materials.json for a given Variant. 
- # material_order_num = list(materialList.keys()).index(material) - - material_List_Of_i.append(material) - - material_rarity_percent = materialList[material] - rarity_List_Of_i.append(float(material_rarity_percent)) - - print(f"MATERIAL_LIST_OF_I:{material_List_Of_i}") - print(f"RARITY_LIST_OF_I:{rarity_List_Of_i}") - - for b in rarity_List_Of_i: - if b == 0: - ifZeroBool = True - elif b != 0: - ifZeroBool = False - - if enableRarity: - try: - if ifZeroBool: - selected_material = random.choices(material_List_Of_i, k=1) - elif not ifZeroBool: - selected_material = random.choices(material_List_Of_i, weights=rarity_List_Of_i, k=1) - except IndexError: - raise IndexError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"An issue was found within the Material List of the Variant collection '{variant}'. For more information on Blend_My_NFTs compatible scenes, " - f"see:\n{bcolors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" - ) - else: - try: - selected_material = random.choices(material_List_Of_i, k=1) - except IndexError: - raise IndexError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"An issue was found within the Material List of the Variant collection '{variant}'. For more information on Blend_My_NFTs compatible scenes, " - f"see:\n{bcolors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" - ) - - return selected_material[0], materialList - -def get_variant_att_index(variant, hierarchy): - variant_attribute = None - - for attribute in hierarchy: - for variant_h in hierarchy[attribute]: - if variant_h == variant: - variant_attribute = attribute - - attribute_index = list(hierarchy.keys()).index(variant_attribute) - variant_order_num = variant.split("_")[1] - return attribute_index, variant_order_num - -def match_DNA_to_Variant(hierarchy, singleDNA): - """ - Matches each DNA number separated by "-" to its attribute, then its variant. 
- """ - - listAttributes = list(hierarchy.keys()) - listDnaDecunstructed = singleDNA.split('-') - dnaDictionary = {} - - for i, j in zip(listAttributes, listDnaDecunstructed): - dnaDictionary[i] = j - - for x in dnaDictionary: - for k in hierarchy[x]: - kNum = hierarchy[x][k]["number"] - if kNum == dnaDictionary[x]: - dnaDictionary.update({x: k}) - return dnaDictionary - -def apply_materials(hierarchy, singleDNA, materialsFile, enableRarity): - """ - DNA with applied material example: "1-1:1-1" : - - The Material DNA will select the material for the Variant order number in the NFT DNA based on the Variant Material - list in the Variant_Material.json file. - """ - - singleDNADict = match_DNA_to_Variant(hierarchy, singleDNA) - materialsFile = json.load(open(materialsFile)) - deconstructed_MaterialDNA = {} - - for a in singleDNADict: - complete = False - for b in materialsFile: - if singleDNADict[a] == b: - material_name, materialList, = select_material(materialsFile[b]['Material List'], b, enableRarity) - material_order_num = list(materialList.keys()).index(material_name) # Gets the Order Number of the Material - deconstructed_MaterialDNA[a] = str(material_order_num + 1) - complete = True - if not complete: - deconstructed_MaterialDNA[a] = "0" - - # This section is now incorrect and needs updating: - - # Make Attributes have the same materials: - # Order your Attributes alphabetically, then assign each Attribute a number, starting with 0. So Attribute 'A' = 0, - # Attribute 'B' = 1, 'C' = 2, 'D' = 3, etc. 
For each pair you want to equal another, add its number it to this list: - # synced_material_attributes = [1, 2] - # - # first_mat = deconstructed_MaterialDNA[synced_material_attributes[0]] - # for i in synced_material_attributes: - # deconstructed_MaterialDNA[i] = first_mat - - material_DNA = "" - for a in deconstructed_MaterialDNA: - num = "-" + str(deconstructed_MaterialDNA[a]) - material_DNA += num - material_DNA = ''.join(material_DNA.split('-', 1)) - - return f"{singleDNA}:{material_DNA}" \ No newline at end of file diff --git a/main/Metadata.py b/main/Metadata.py deleted file mode 100644 index 618e028..0000000 --- a/main/Metadata.py +++ /dev/null @@ -1,143 +0,0 @@ -# Some code in this file was generously sponsored by the amazing team over at SolSweepers! -# Feel free to check out their amazing project and see how they are using Blend_My_NFTs: -# https://discord.gg/QTT7dzcuVs - -# Purpose: -# This file returns the specified meta data format to the Exporter.py for a given NFT DNA. - -import bpy -import os -import json - -def sendMetaDataToJson(metaDataDict, save_path, file_name): - jsonMetaData = json.dumps(metaDataDict, indent=1, ensure_ascii=True) - with open(os.path.join(save_path, f"{file_name}.json"), 'w') as outfile: - outfile.write(jsonMetaData + '\n') - -def stripNums(variant): - variant = str(variant).split('_')[0] - return variant - -# Cardano Template -def createCardanoMetadata(name, Order_Num, NFT_DNA, NFT_Variants, Material_Attributes, - custom_Fields, enableCustomFields, cardano_description, cardanoMetadataPath): - - metaDataDictCardano = {"721": { - "": { - name: { - "name": name, - "image": "", - "mediaType": "", - "description": cardano_description, - } - }, - "version": "1.0" - }} - - # Variants and Attributes: - for i in NFT_Variants: - metaDataDictCardano["721"][""][name][i] = stripNums(NFT_Variants[i]) - - # Material Variants and Attributes: - for i in Material_Attributes: - metaDataDictCardano["721"][""][name][i] = Material_Attributes[i] 
- - # Custom Fields: - if enableCustomFields: - for i in custom_Fields: - metaDataDictCardano["721"][""][name][i] = custom_Fields[i] - - sendMetaDataToJson(metaDataDictCardano, cardanoMetadataPath, name) - - -# Solana Template -def createSolanaMetaData(name, Order_Num, NFT_DNA, NFT_Variants, Material_Attributes, custom_Fields, enableCustomFields, - solana_description, solanaMetadataPath): - metaDataDictSolana = {"name": name, "symbol": "", "description": solana_description, "seller_fee_basis_points": None, - "image": "", "animation_url": "", "external_url": ""} - - attributes = [] - - # Variant and Attributes: - for i in NFT_Variants: - dictionary = { - "trait_type": i, - "value": stripNums(NFT_Variants[i]) - } - attributes.append(dictionary) - - # Material Variants and Attributes: - for i in Material_Attributes: - dictionary = { - "trait_type": i, - "value": Material_Attributes[i] - } - attributes.append(dictionary) - - # Custom Fields: - if enableCustomFields: - for i in custom_Fields: - dictionary = { - "trait_type": i, - "value": custom_Fields[i] - } - attributes.append(dictionary) - - metaDataDictSolana["attributes"] = attributes - metaDataDictSolana["collection"] = { - "name": "", - "family": "" - } - - metaDataDictSolana["properties"] = { - "files": [{"uri": "", "type": ""}], - "category": "", - "creators": [{"address": "", "share": None}] - } - - sendMetaDataToJson(metaDataDictSolana, solanaMetadataPath, name) - - -# ERC721 Template -def createErc721MetaData(name, Order_Num, NFT_DNA, NFT_Variants, Material_Attributes, custom_Fields, enableCustomFields, - erc721_description, erc721MetadataPath): - metaDataDictErc721 = { - "name": name, - "description": erc721_description, - "image": "", - "attributes": None, - } - - attributes = [] - - # Variants and Attributes: - for i in NFT_Variants: - dictionary = { - "trait_type": i, - "value": stripNums(NFT_Variants[i]) - } - - attributes.append(dictionary) - - # Material Variants and Attributes: - for i in 
Material_Attributes: - dictionary = { - "trait_type": i, - "value": Material_Attributes[i] - } - - attributes.append(dictionary) - - # Custom Fields: - if enableCustomFields: - for i in custom_Fields: - dictionary = { - "trait_type": i, - "value": custom_Fields[i] - } - attributes.append(dictionary) - - metaDataDictErc721["attributes"] = attributes - - sendMetaDataToJson(metaDataDictErc721, erc721MetadataPath, name) - diff --git a/main/Rarity.py b/main/Rarity.py deleted file mode 100644 index eff5c7c..0000000 --- a/main/Rarity.py +++ /dev/null @@ -1,50 +0,0 @@ -# Purpose: -# This file sorts the Variants in DNA slots based on the rarity value set in the name. - -import bpy -import random - -from .Constants import bcolors, removeList, remove_file_by_extension - - -def createDNArarity(hierarchy): - """ - Sorts through DataDictionary and appropriately weights each variant based on their rarity percentage set in Blender - ("rarity" in DNA_Generator). Then - """ - singleDNA = "" - - for i in hierarchy: - number_List_Of_i = [] - rarity_List_Of_i = [] - ifZeroBool = None - - for k in hierarchy[i]: - number = hierarchy[i][k]["number"] - number_List_Of_i.append(number) - - rarity = hierarchy[i][k]["rarity"] - rarity_List_Of_i.append(float(rarity)) - - for x in rarity_List_Of_i: - if x == 0: - ifZeroBool = True - elif x != 0: - ifZeroBool = False - - try: - if ifZeroBool: - variantByNum = random.choices(number_List_Of_i, k=1) - elif not ifZeroBool: - variantByNum = random.choices(number_List_Of_i, weights=rarity_List_Of_i, k=1) - except IndexError: - raise IndexError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"An issue was found within the Attribute collection '{i}'. 
For more information on Blend_My_NFTs compatible scenes, " - f"see:\n{bcolors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" - ) - - singleDNA += "-" + str(variantByNum[0]) - singleDNA = ''.join(singleDNA.split('-', 1)) - return singleDNA \ No newline at end of file diff --git a/main/Refactorer.py b/main/Refactorer.py deleted file mode 100644 index 6566753..0000000 --- a/main/Refactorer.py +++ /dev/null @@ -1,48 +0,0 @@ -# Purpose: -# This file goes through all batches, renames, and sorts all nft files to a Complete_Collection folder in Blend_My_NFTs - -import bpy -import os -import json -import shutil - -from .Constants import bcolors, removeList, remove_file_by_extension - - -def reformatNFTCollection(refactor_panel_input): - completeCollPath = os.path.join(refactor_panel_input.save_path, "Blend_My_NFTs Output", "Complete_Collection") - - if not os.path.exists(completeCollPath): - os.mkdir(completeCollPath) - - batchListDirty = os.listdir(refactor_panel_input.nftBatch_save_path) - batchList = remove_file_by_extension(batchListDirty) - collection_info = {"Total Time": 0} - - for folder in batchList: - batch_info = json.load(open(os.path.join(refactor_panel_input.nftBatch_save_path, folder, "batch_info.json"))) - collection_info[os.path.basename(folder)] = batch_info - collection_info["Total Time"] = collection_info["Total Time"] + batch_info["Batch Render Time"] - - fileListDirty = os.listdir(os.path.join(refactor_panel_input.nftBatch_save_path, folder)) - filelist = remove_file_by_extension(fileListDirty) - - for mediaTypeFolder in filelist: - if mediaTypeFolder != "batch_info.json": - mediaTypeFolderDir = os.path.join(refactor_panel_input.nftBatch_save_path, folder, mediaTypeFolder) - - for i in os.listdir(mediaTypeFolderDir): - destination = os.path.join(completeCollPath, mediaTypeFolder) - if not os.path.exists(destination): - os.makedirs(destination) - - shutil.move(os.path.join(mediaTypeFolderDir, i), 
destination) - - collection_info = json.dumps(collection_info, indent=1, ensure_ascii=True) - with open(os.path.join(completeCollPath, "collection_info.json"), 'w') as outfile: - outfile.write(collection_info + '\n') - - print(f"All NFT files stored and sorted to the Complete_Collection folder in {refactor_panel_input.save_path}") - - shutil.rmtree(refactor_panel_input.nftBatch_save_path) - diff --git a/main/dna_generator.py b/main/dna_generator.py new file mode 100644 index 0000000..f54a2ae --- /dev/null +++ b/main/dna_generator.py @@ -0,0 +1,359 @@ +# Purpose: +# This file generates NFT DNA based on a .blend file scene structure and exports NFTRecord.json. + +import os +import time +import json +import random +import logging +import traceback +from functools import partial + +from . import logic, material_generator, helpers +from .helpers import TextColors + +log = logging.getLogger(__name__) + + +def generate_nft_dna( + collection_size, + enable_rarity, + enable_logic, + logic_file, + enable_materials, + materials_file, +): + """ + Returns batchDataDictionary containing the number of NFT combinations, hierarchy, and the dna_list. 
+ """ + + hierarchy = helpers.get_hierarchy() + + # DNA random, Rarity and Logic methods: + data_dictionary = {} + + def create_dna_random(hierarchy): + """Creates a single DNA randomly without Rarity or Logic.""" + dna_str = "" + dna_str_list = [] + list_option_variant = [] + + for i in hierarchy: + num_child = len(hierarchy[i]) + possible_nums = list(range(1, num_child + 1)) + list_option_variant.append(possible_nums) + + for i in list_option_variant: + random_variant_num = random.choices(i, k=1) + str1 = ''.join(str(e) for e in random_variant_num) + dna_str_list.append(str1) + + for i in dna_str_list: + num = "-" + str(i) + dna_str += num + + dna = ''.join(dna_str.split('-', 1)) + + return str(dna) + + def create_dna_rarity(hierarchy): + """ + Sorts through data_dictionary and appropriately weights each variant based on their rarity percentage set in Blender + ("rarity" in DNA_Generator). Then + """ + single_dna = "" + + for i in hierarchy: + number_list_of_i = [] + rarity_list_of_i = [] + if_zero_bool = None + + for k in hierarchy[i]: + number = hierarchy[i][k]["number"] + number_list_of_i.append(number) + + rarity = hierarchy[i][k]["rarity"] + rarity_list_of_i.append(float(rarity)) + + for x in rarity_list_of_i: + if x == 0: + if_zero_bool = True + elif x != 0: + if_zero_bool = False + + try: + if if_zero_bool: + variant_by_num = random.choices(number_list_of_i, k=1) + elif not if_zero_bool: + variant_by_num = random.choices(number_list_of_i, weights=rarity_list_of_i, k=1) + except IndexError: + raise IndexError( + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"An issue was found within the Attribute collection '{i}'. 
For more information on Blend_My_NFTs " + f"compatible scenes, see:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + ) + + single_dna += "-" + str(variant_by_num[0]) + single_dna = ''.join(single_dna.split('-', 1)) + return single_dna + + def single_complete_dna(): + """ + This function applies Rarity and Logic to a single DNA created by createDNASingle() if Rarity or Logic specified + """ + + single_dna = "" + if not enable_rarity: + single_dna = create_dna_random(hierarchy) + log.debug( + f"\n================" + f"\nOriginal DNA: {single_dna}" + ) + + if enable_rarity: + single_dna = create_dna_rarity(hierarchy) + log.debug( + f"\n================" + f"\nRarity DNA: {single_dna}" + ) + + if enable_logic: + single_dna = logic.logicafy_dna_single(hierarchy, single_dna, logic_file, enable_rarity) + log.debug( + f"\n================" + f"\nLogic DNA: {single_dna}" + ) + + if enable_materials: + single_dna = material_generator.apply_materials(hierarchy, single_dna, materials_file, enable_rarity) + log.debug( + f"\n================" + f"\nMaterials DNA: {single_dna}" + f"\n================\n" + + ) + + return single_dna + + def create_dna_list(): + """ + Creates dna_list. Loops through createDNARandom() and applies Rarity, and Logic while checking if all DNA + are unique. 
+ """ + dna_set_return = set() + + for i in range(collection_size): + dna_push_to_list = partial(single_complete_dna) + + dna_set_return |= {''.join([dna_push_to_list()]) for _ in range(collection_size - len(dna_set_return))} + + dna_list_non_formatted = list(dna_set_return) + + dna_list_formatted = [] + dna_counter = 1 + for i in dna_list_non_formatted: + dna_list_formatted.append({ + i: { + "complete": False, + "order_num": dna_counter + } + }) + + dna_counter += 1 + + return dna_list_formatted + + dna_list = create_dna_list() + + helpers.raise_warning_collection_size(dna_list, collection_size) + + # Data stored in batchDataDictionary: + data_dictionary["num_nfts_generated"] = len(dna_list) + data_dictionary["hierarchy"] = hierarchy + data_dictionary["dna_list"] = dna_list + + return data_dictionary + + +def make_batches( + collection_size, + nfts_per_batch, + save_path, + batch_json_save_path +): + """ + Sorts through all the batches and outputs a given number of batches depending on collection_size and nfts_per_batch. + These files are then saved as Batch#.json files to batch_json_save_path + """ + + # Clears the Batch Data folder of Batches: + batch_list = os.listdir(batch_json_save_path) + if batch_list: + for i in batch_list: + batch = os.path.join(batch_json_save_path, i) + if os.path.exists(batch): + os.remove( + os.path.join(batch_json_save_path, i) + ) + + blend_my_nf_ts_output = os.path.join(save_path, "Blend_My_NFTs Output", "NFT_Data") + nft_record_save_path = os.path.join(blend_my_nf_ts_output, "NFTRecord.json") + data_dictionary = json.load(open(nft_record_save_path)) + + hierarchy = data_dictionary["hierarchy"] + dna_list = data_dictionary["dna_list"] + + num_batches = collection_size // nfts_per_batch + remainder_dna = collection_size % nfts_per_batch + if remainder_dna > 0: + num_batches += 1 + + log.info( + f"\nGenerating {num_batches} batch files. If the last batch isn't filled all the way the program will " + f"operate normally." 
+ ) + + batches_dna_list = [] + + for i in range(num_batches): + if i != range(num_batches)[-1]: + batch_dna_list = list(dna_list[0:nfts_per_batch]) + batches_dna_list.append(batch_dna_list) + + dna_list = [x for x in dna_list if x not in batch_dna_list] + else: + batch_dna_list = dna_list + + batch_dictionary = { + "nfts_in_batch": int(len(batch_dna_list)), + "hierarchy": hierarchy, + "batch_dna_list": batch_dna_list + } + + batch_dictionary = json.dumps(batch_dictionary, indent=1, ensure_ascii=True) + + with open(os.path.join(batch_json_save_path, f"Batch{i + 1}.json"), "w") as outfile: + outfile.write(batch_dictionary) + + +def send_to_record( + collection_size, + nfts_per_batch, + save_path, + enable_rarity, + enable_logic, + logic_file, + enable_materials, + materials_file, + blend_my_nfts_output, + batch_json_save_path, + enable_debug, + log_path +): + """ + Creates NFTRecord.json file and sends "batch_data_dictionary" to it. NFTRecord.json is a permanent record of all DNA + you've generated with all attribute variants. If you add new variants or attributes to your .blend file, other scripts + need to reference this .json file to generate new DNA and make note of the new attributes and variants to prevent + repeat DNA. + """ + + # Checking Scene is compatible with BMNFTs: + helpers.check_scene() + + # Messages: + log.info( + f"\n{TextColors.OK}======== Creating NFT Data ({collection_size} DNA) ========{TextColors.RESET}" + ) + + if not enable_rarity and not enable_logic: + log.info( + f"\n - NFT DNA will be determined randomly, no special properties or parameters are " + f"applied." + ) + + if enable_rarity: + log.info( + f"\n - Rarity is ON. Weights listed in .blend scene will be taken into account." + f"" + ) + + if enable_logic: + log.info( + f"\n - Logic is ON. {len(list(logic_file.keys()))} rules detected, implementation will " + f"be attempted." + ) + + if enable_materials: + log.info( + f"\n - Materials are ON. 
{len(list(json.load(open(materials_file)).keys()))} materials " + f"instances detected, implementation will be attempted." + ) + time_start = time.time() + + def create_nft_data(): + try: + data_dictionary = generate_nft_dna( + collection_size, + enable_rarity, + enable_logic, + logic_file, + enable_materials, + materials_file, + ) + nft_record_save_path = os.path.join(blend_my_nfts_output, "NFTRecord.json") + + # Checks: + helpers.raise_warning_max_nfts(nfts_per_batch, collection_size) + helpers.check_duplicates(data_dictionary["dna_list"]) + helpers.raise_error_zero_combinations() + + if enable_rarity: + helpers.check_rarity(data_dictionary["hierarchy"], data_dictionary["dna_list"], + os.path.join(save_path, "Blend_My_NFTs Output/NFT_Data")) + + except FileNotFoundError: + log.error( + f"\n{traceback.format_exc()}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"Data not saved to NFTRecord.json, file not found. Check that your save path, logic file path, or " + f"materials file path is correct. For more information, see:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + ) + raise + + finally: + loading.stop() + + try: + ledger = json.dumps(data_dictionary, indent=1, ensure_ascii=True) + with open(nft_record_save_path, 'w') as outfile: + outfile.write(ledger + '\n') + + log.info( + f"\n{TextColors.OK}{len(data_dictionary['dna_list'])} NFT data successfully saved to:" + f"\n{nft_record_save_path}{TextColors.RESET}" + ) + + except Exception: + log.error( + f"\n{traceback.format_exc()}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows " + f"the naming conventions and scene structure. 
For more information, " + f"see:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + ) + raise + + # Loading Animation: + loading = helpers.Loader(f'\nCreating NFT DNA...', '').start() + create_nft_data() + make_batches(collection_size, nfts_per_batch, save_path, batch_json_save_path) + loading.stop() + + time_end = time.time() + + log.info( + f"\n{TextColors.OK}TIME [Created and Saved NFT data]: {time_end - time_start}s.\n{TextColors.RESET}" + ) diff --git a/main/exporter.py b/main/exporter.py new file mode 100644 index 0000000..377d57f --- /dev/null +++ b/main/exporter.py @@ -0,0 +1,670 @@ +# Purpose: +# This file takes a given Batch created by dna_generator.py and tells blender to render the image or export a 3D model +# to the NFT_Output folder. + +import bpy +import os +import ssl +import time +import json +import smtplib +import logging +import datetime +import platform +import traceback + +from .helpers import TextColors, Loader +from .metadata_templates import create_cardano_metadata, createSolanaMetaData, create_erc721_meta_data + +log = logging.getLogger(__name__) + + +# Save info +def save_batch(batch, file_name): + saved_batch = json.dumps(batch, indent=1, ensure_ascii=True) + + with open(os.path.join(file_name), 'w') as outfile: + outfile.write(saved_batch + '\n') + + +def save_generation_state(input): + """ + Saves date and time of generation start, and generation types; Images, Animations, 3D Models, and the file types for + each. 
+ """ + file_name = os.path.join(input.batch_json_save_path, "Batch{}.json".format(input.batch_to_generate)) + batch = json.load(open(file_name)) + + current_time = datetime.datetime.now().strftime("%H:%M:%S") + current_date = datetime.datetime.now().strftime("%d/%m/%Y") + local_timezone = str(datetime.datetime.now(datetime.timezone.utc)) + + if "Generation Save" in batch: + batch_save_number = int(batch[f"Generation Save"].index(batch[f"Generation Save"][-1])) + else: + batch_save_number = 0 + + batch["Generation Save"] = list() + batch["Generation Save"].append({ + "Batch Save Number": batch_save_number + 1, + "DNA Generated": None, + "Generation Start Date and Time": [current_time, current_date, local_timezone], + "Render_Settings": { + "nft_name": input.nft_name, + "save_path": input.save_path, + "nfts_per_batch": input.nfts_per_batch, + "batch_to_generate": input.batch_to_generate, + "collection_size": input.collection_size, + + "blend_my_nfts_output": input.blend_my_nfts_output, + "batch_json_save_path": input.batch_json_save_path, + "nft_batch_save_path": input.nft_batch_save_path, + + "enable_images": input.enable_images, + "image_file_format": input.image_file_format, + + "enable_animations": input.enable_animations, + "animation_file_format": input.animation_file_format, + + "enable_models": input.enable_models, + "model_file_format": input.model_file_format, + + "enable_custom_fields": input.enable_custom_fields, + + "cardano_metadata_bool": input.cardano_metadata_bool, + "solana_metadata_bool": input.solana_metadata_bool, + "erc721_metadata": input.erc721_metadata, + + "cardano_description": input.cardano_description, + "solana_description": input.solana_description, + "erc721_description": input.erc721_description, + + "enable_materials": input.enable_materials, + "materials_file": input.materials_file, + + "enable_logic": input.enable_logic, + "enable_logic_json": input.enable_logic_json, + "logic_file": input.logic_file, + + "enable_rarity": 
input.enable_rarity, + + "enable_auto_shutdown": input.enable_auto_shutdown, + + "specify_time_bool": input.specify_time_bool, + "hours": input.hours, + "minutes": input.minutes, + + "email_notification_bool": input.email_notification_bool, + "sender_from": input.sender_from, + "email_password": input.email_password, + "receiver_to": input.receiver_to, + + "enable_debug": input.enable_debug, + "log_path": input.log_path, + + "enable_dry_run": input.enable_dry_run, + + "custom_fields": input.custom_fields, + }, + }) + + save_batch(batch, file_name) + + +def save_completed(full_single_dna, a, x, batch_json_save_path, batch_to_generate): + """Saves progress of rendering to batch.json file.""" + + file_name = os.path.join(batch_json_save_path, "Batch{}.json".format(batch_to_generate)) + batch = json.load(open(file_name)) + index = batch["batch_dna_list"].index(a) + batch["batch_dna_list"][index][full_single_dna]["complete"] = True + batch["Generation Save"][-1]["DNA Generated"] = x + + save_batch(batch, file_name) + + +# Exporter functions: +def get_batch_data(batch_to_generate, batch_json_save_path): + """ + Retrieves a given batches data determined by renderBatch in config.py + """ + + file_name = os.path.join(batch_json_save_path, "Batch{}.json".format(batch_to_generate)) + batch = json.load(open(file_name)) + + nfts_in_batch = batch["nfts_in_batch"] + hierarchy = batch["hierarchy"] + batch_dna_list = batch["batch_dna_list"] + + return nfts_in_batch, hierarchy, batch_dna_list + + +def render_and_save_nfts(input): + """ + Renders the NFT DNA in a Batch#.json, where # is renderBatch in config.py. Turns off the viewport camera and + the render camera for all items in hierarchy. 
+ """ + + time_start_1 = time.time() + + # If failed Batch is detected and user is resuming its generation: + if input.fail_state: + log.info( + f"{TextColors.OK}\nResuming Batch #{input.failed_batch}{TextColors.RESET}" + ) + nfts_in_batch, hierarchy, batch_dna_list = get_batch_data(input.failed_batch, input.batch_json_save_path) + for a in range(input.failed_dna): + del batch_dna_list[0] + x = input.failed_dna + 1 + + # If user is generating the normal way: + else: + log.info( + f"{TextColors.OK}\n======== Generating Batch #{input.batch_to_generate} ========{TextColors.RESET}" + ) + nfts_in_batch, hierarchy, batch_dna_list = get_batch_data(input.batch_to_generate, input.batch_json_save_path) + save_generation_state(input) + x = 1 + + if input.enable_materials: + materials_file = json.load(open(input.materials_file)) + + for a in batch_dna_list: + full_single_dna = list(a.keys())[0] + order_num = a[full_single_dna]['order_num'] + + # Material handling: + if input.enable_materials: + single_dna, material_dna = full_single_dna.split(':') + + if not input.enable_materials: + single_dna = full_single_dna + + def match_dna_to_variant(single_dna): + """ + Matches each DNA number separated by "-" to its attribute, then its variant. + """ + + list_attributes = list(hierarchy.keys()) + list_dna_deconstructed = single_dna.split('-') + dna_dictionary = {} + + for i, j in zip(list_attributes, list_dna_deconstructed): + dna_dictionary[i] = j + + for x in dna_dictionary: + for k in hierarchy[x]: + k_num = hierarchy[x][k]["number"] + if k_num == dna_dictionary[x]: + dna_dictionary.update({x: k}) + return dna_dictionary + + def match_material_dna_to_material(single_dna, material_dna, materials_file): + """ + Matches the Material DNA to it's selected Materials unless a 0 is present meaning no material for that variant was selected. 
+ """ + list_attributes = list(hierarchy.keys()) + list_dna_deconstructed = single_dna.split('-') + list_material_dna_deconstructed = material_dna.split('-') + + full_dna_dict = {} + + for attribute, variant, material in zip( + list_attributes, + list_dna_deconstructed, + list_material_dna_deconstructed + ): + + for var in hierarchy[attribute]: + if hierarchy[attribute][var]['number'] == variant: + variant = var + + if material != '0': # If material is not empty + for variant_m in materials_file: + if variant == variant_m: + # Getting Materials name from Materials index in the Materials List + materials_list = list(materials_file[variant_m]["Material List"].keys()) + + material = materials_list[int(material) - 1] # Subtract 1 because '0' means empty mat + break + + full_dna_dict[variant] = material + + return full_dna_dict + + metadata_material_dict = {} + + if input.enable_materials: + material_dna_dictionary = match_material_dna_to_material(single_dna, material_dna, materials_file) + + for var_mat in list(material_dna_dictionary.keys()): + if material_dna_dictionary[var_mat]!='0': + if not materials_file[var_mat]['Variant Objects']: + """ + If objects to apply material to not specified, apply to all objects in Variant collection. + """ + metadata_material_dict[var_mat] = material_dna_dictionary[var_mat] + + for obj in bpy.data.collections[var_mat].all_objects: + selected_object = bpy.data.objects.get(obj.name) + selected_object.active_material = bpy.data.materials[material_dna_dictionary[var_mat]] + + if materials_file[var_mat]['Variant Objects']: + """ + If objects to apply material to are specified, apply material only to objects specified withing + the Variant collection. 
+ """ + metadata_material_dict[var_mat] = material_dna_dictionary[var_mat] + + for obj in materials_file[var_mat]['Variant Objects']: + selected_object = bpy.data.objects.get(obj) + selected_object.active_material = bpy.data.materials[material_dna_dictionary[var_mat]] + + # Turn off render camera and viewport camera for all collections in hierarchy + for i in hierarchy: + for j in hierarchy[i]: + try: + bpy.data.collections[j].hide_render = True + bpy.data.collections[j].hide_viewport = True + except KeyError: + log.error( + f"\n{traceback.format_exc()}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"The Collection '{j}' appears to be missing or has been renamed. If you made any changes " + f"to your .blend file scene, ensure you re-create your NFT Data so Blend_My_NFTs can read " + f"your scene. For more information see:{TextColors.RESET}" + f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + ) + raise TypeError() + + dna_dictionary = match_dna_to_variant(single_dna) + name = input.nft_name + "_" + str(order_num) + + # Change Text Object in Scene to match DNA string: + # Variables that can be used: full_single_dna, name, order_num + # ob = bpy.data.objects['Text'] # Object name + # ob.data.body = str(f"DNA: {full_single_dna}") # Set text of Text Object ob + + log.info( + f"\n{TextColors.OK}======== Generating NFT {x}/{nfts_in_batch}: {name} ========{TextColors.RESET}" + f"\nVariants selected:" + f"\n{dna_dictionary}" + ) + if input.enable_materials: + log.info( + f"\nMaterials selected:" + f"\n{material_dna_dictionary}" + ) + + log.info(f"\nDNA Code:{full_single_dna}") + + for c in dna_dictionary: + collection = dna_dictionary[c] + if collection != '0': + bpy.data.collections[collection].hide_render = False + bpy.data.collections[collection].hide_viewport = False + + time_start_2 = time.time() + + # Main paths for batch sub-folders: + batch_folder = os.path.join(input.nft_batch_save_path, "Batch" + 
str(input.batch_to_generate)) + + image_folder = os.path.join(batch_folder, "Images") + animation_folder = os.path.join(batch_folder, "Animations") + model_folder = os.path.join(batch_folder, "Models") + bmnft_data_folder = os.path.join(batch_folder, "BMNFT_data") + + image_path = os.path.join(image_folder, name) + animation_path = os.path.join(animation_folder, name) + model_path = os.path.join(model_folder, name) + + cardano_metadata_path = os.path.join(batch_folder, "Cardano_metadata") + solana_metadata_path = os.path.join(batch_folder, "Solana_metadata") + erc721_metadata_path = os.path.join(batch_folder, "Erc721_metadata") + + def check_failed_exists(file_path): + """ + Delete a file if a fail state is detected and if the file being re-generated already exists. Prevents + animations from corrupting. + """ + if input.fail_state: + if os.path.exists(file_path): + os.remove(file_path) + + # Generation/Rendering: + if input.enable_images: + + log.info(f"\n{TextColors.OK}-------- Image --------{TextColors.RESET}") + + image_render_time_start = time.time() + + check_failed_exists(image_path) + + def render_image(): + if not os.path.exists(image_folder): + os.makedirs(image_folder) + + bpy.context.scene.render.filepath = image_path + bpy.context.scene.render.image_settings.file_format = input.image_file_format + + if not input.enable_debug: + bpy.ops.render.render(write_still=True) + + # Loading Animation: + loading = Loader(f'Rendering Image {x}/{nfts_in_batch}...', '').start() + render_image() + loading.stop() + + image_render_time_end = time.time() + + log.info( + f"{TextColors.OK}TIME [Rendered Image]: {image_render_time_end - image_render_time_start}s." 
+ f"\n{TextColors.RESET}" + ) + + if input.enable_animations: + log.info(f"\n{TextColors.OK}-------- Animation --------{TextColors.RESET}") + + animation_render_time_start = time.time() + + check_failed_exists(animation_path) + + def render_animation(): + if not os.path.exists(animation_folder): + os.makedirs(animation_folder) + + if not input.enable_debug: + if input.animation_file_format == 'MP4': + bpy.context.scene.render.filepath = animation_path + bpy.context.scene.render.image_settings.file_format = "FFMPEG" + + bpy.context.scene.render.ffmpeg.format = 'MPEG4' + bpy.context.scene.render.ffmpeg.codec = 'H264' + bpy.ops.render.render(animation=True) + + elif input.animation_file_format == 'PNG': + if not os.path.exists(animation_path): + os.makedirs(animation_path) + + bpy.context.scene.render.filepath = os.path.join(animation_path, name) + bpy.context.scene.render.image_settings.file_format = input.animation_file_format + bpy.ops.render.render(animation=True) + + elif input.animation_file_format == 'TIFF': + if not os.path.exists(animation_path): + os.makedirs(animation_path) + + bpy.context.scene.render.filepath = os.path.join(animation_path, name) + bpy.context.scene.render.image_settings.file_format = input.animation_file_format + bpy.ops.render.render(animation=True) + + else: + bpy.context.scene.render.filepath = animation_path + bpy.context.scene.render.image_settings.file_format = input.animation_file_format + bpy.ops.render.render(animation=True) + + # Loading Animation: + loading = Loader(f'Rendering Animation {x}/{nfts_in_batch}...', '').start() + render_animation() + loading.stop() + + animation_render_time_end = time.time() + + log.info( + f"\n{TextColors.OK}TIME [Rendered Animation]: " + f"{animation_render_time_end - animation_render_time_start}s.{TextColors.RESET}" + ) + + if input.enable_models: + log.info(f"\n{TextColors.OK}-------- 3D Model --------{TextColors.RESET}") + + model_generation_time_start = time.time() + + def generate_models(): 
+ if not os.path.exists(model_folder): + os.makedirs(model_folder) + + for i in dna_dictionary: + coll = dna_dictionary[i] + if coll != '0': + for obj in bpy.data.collections[coll].all_objects: + obj.select_set(True) + + for obj in bpy.data.collections['Script_Ignore'].all_objects: + obj.select_set(True) + + # Remove objects from 3D model export: + # remove_objects: list = [ + # ] + # + # for obj in bpy.data.objects: + # if obj.name in remove_objects: + # obj.select_set(False) + + if not input.enable_debug: + if input.model_file_format == 'GLB': + check_failed_exists(f"{model_path}.glb") + bpy.ops.export_scene.gltf( + filepath=f"{model_path}.glb", + check_existing=True, + export_format='GLB', + export_keep_originals=True, + use_selection=True + ) + if input.model_file_format == 'GLTF_SEPARATE': + check_failed_exists(f"{model_path}.gltf") + check_failed_exists(f"{model_path}.bin") + bpy.ops.export_scene.gltf( + filepath=f"{model_path}", + check_existing=True, + export_format='GLTF_SEPARATE', + export_keep_originals=True, + use_selection=True + ) + if input.model_file_format == 'GLTF_EMBEDDED': + check_failed_exists(f"{model_path}.gltf") + bpy.ops.export_scene.gltf( + filepath=f"{model_path}.gltf", + check_existing=True, + export_format='GLTF_EMBEDDED', + export_keep_originals=True, + use_selection=True + ) + elif input.model_file_format == 'FBX': + check_failed_exists(f"{model_path}.fbx") + bpy.ops.export_scene.fbx( + filepath=f"{model_path}.fbx", + check_existing=True, + use_selection=True + ) + elif input.model_file_format == 'OBJ': + check_failed_exists(f"{model_path}.obj") + bpy.ops.export_scene.obj( + filepath=f"{model_path}.obj", + check_existing=True, + use_selection=True, + ) + elif input.model_file_format == 'X3D': + check_failed_exists(f"{model_path}.x3d") + bpy.ops.export_scene.x3d( + filepath=f"{model_path}.x3d", + check_existing=True, + use_selection=True + ) + elif input.model_file_format == 'STL': + check_failed_exists(f"{model_path}.stl") + 
bpy.ops.export_mesh.stl( + filepath=f"{model_path}.stl", + check_existing=True, + use_selection=True + ) + elif input.model_file_format == 'VOX': + check_failed_exists(f"{model_path}.vox") + bpy.ops.export_vox.some_data(filepath=f"{model_path}.vox") + + # Loading Animation: + loading = Loader(f'Generating 3D model {x}/{nfts_in_batch}...', '').start() + generate_models() + loading.stop() + + model_generation_time_end = time.time() + + log.info( + f"\n{TextColors.OK}TIME [Generated 3D Model]: " + f"{model_generation_time_end - model_generation_time_start}s.{TextColors.RESET}" + ) + + # Generating Metadata: + if input.cardano_metadata_bool: + if not os.path.exists(cardano_metadata_path): + os.makedirs(cardano_metadata_path) + create_cardano_metadata( + name, + order_num, + full_single_dna, + dna_dictionary, + metadata_material_dict, + input.custom_fields, + input.enable_custom_fields, + input.cardano_description, + cardano_metadata_path + ) + + if input.solana_metadata_bool: + if not os.path.exists(solana_metadata_path): + os.makedirs(solana_metadata_path) + createSolanaMetaData( + name, + order_num, + full_single_dna, + dna_dictionary, + metadata_material_dict, + input.custom_fields, + input.enable_custom_fields, + input.solana_description, + solana_metadata_path + ) + + if input.erc721_metadata: + if not os.path.exists(erc721_metadata_path): + os.makedirs(erc721_metadata_path) + create_erc721_meta_data( + name, + order_num, + full_single_dna, + dna_dictionary, + metadata_material_dict, + input.custom_fields, + input.enable_custom_fields, + input.erc721_description, + erc721_metadata_path + ) + + if not os.path.exists(bmnft_data_folder): + os.makedirs(bmnft_data_folder) + + for b in dna_dictionary: + if dna_dictionary[b] == "0": + dna_dictionary[b] = "Empty" + + meta_data_dict = { + "name": name, + "nft_dna": a, + "nft_variants": dna_dictionary, + "material_attributes": metadata_material_dict + } + + json_meta_data = json.dumps(meta_data_dict, indent=1, 
ensure_ascii=True) + + with open(os.path.join(bmnft_data_folder, "Data_" + name + ".json"), 'w') as outfile: + outfile.write(json_meta_data + '\n') + + log.info(f"{TextColors.OK}\nTIME [NFT {name} Generated]: {time.time() - time_start_2}s") + + save_completed(full_single_dna, a, x, input.batch_json_save_path, input.batch_to_generate) + + x += 1 + + for i in hierarchy: + for j in hierarchy[i]: + bpy.data.collections[j].hide_render = False + bpy.data.collections[j].hide_viewport = False + + batch_complete_time = time.time() - time_start_1 + + log.info( + f"\nAll NFTs in Batch {input.batch_to_generate} successfully generated and saved at:" + f"\n{input.nft_batch_save_path}" + f"\nTIME [Batch {input.batch_to_generate} Generated]: {batch_complete_time}s\n" + ) + + batch_info = {"Batch Render Time": batch_complete_time, "Number of NFTs generated in Batch": x - 1, + "Average time per generation": batch_complete_time / x - 1} + + batch_info_folder = os.path.join( + input.nft_batch_save_path, + "Batch" + str(input.batch_to_generate), + "batch_info.json" + ) + + save_batch(batch_info, batch_info_folder) + + # Send Email that Batch is complete: + if input.email_notification_bool: + port = 465 # For SSL + smtp_server = "smtp.gmail.com" + sender_email = input.sender_from # Enter your address + receiver_email = input.receiver_to # Enter receiver address + password = input.email_password + + # Get batch info for message: + if input.fail_state: + batch = input.fail_state + batch_data = get_batch_data(input.failed_batch, input.batch_json_save_path) + + else: + batch_data = get_batch_data(input.batch_to_generate, input.batch_json_save_path) + + batch = input.batch_to_generate + + generation_time = str(datetime.timedelta(seconds=batch_complete_time)) + + message = f"""\ + Subject: Batch {batch} completed {x - 1} NFTs in {generation_time} (h:m:s) + + Generation Time: + {generation_time.split(':')[0]} Hours, + {generation_time.split(':')[1]} Minutes, + {generation_time.split(':')[2]} 
Seconds + Batch Data: + + {batch_data} + + This message was sent from an instance of the Blend_My_NFTs Blender add-on. + """ + + context = ssl.create_default_context() + with smtplib.SMTP_SSL(smtp_server, port, context=context) as server: + server.login(sender_email, password) + server.sendmail(sender_email, receiver_email, message) + + # Automatic Shutdown: + # If user selects automatic shutdown but did not specify time after Batch completion + def shutdown(time): + if platform.system() == "Windows": + os.system(f"shutdown /s /t {time}") + if platform.system() == "Darwin": + os.system(f"shutdown /s /t {time}") + + if input.enable_auto_shutdown and not input.specify_time_bool: + shutdown(0) + + # If user selects automatic shutdown and specify time after Batch completion + if input.enable_auto_shutdown and input.specify_time_bool: + hours = (int(input.hours) / 60) / 60 + minutes = int(input.minutes) / 60 + total_sleep_time = hours + minutes + + # time.sleep(total_sleep_time) + + shutdown(total_sleep_time) diff --git a/main/get_combinations.py b/main/get_combinations.py deleted file mode 100644 index f1db7eb..0000000 --- a/main/get_combinations.py +++ /dev/null @@ -1,26 +0,0 @@ -import bpy - -from . import DNA_Generator - - -def get_combinations(): - """ - Returns "combinations", the number of all possible NFT DNA for a given Blender scene formatted to BMNFTs conventions - combinations. 
- """ - - hierarchy = DNA_Generator.get_hierarchy() - hierarchyByNum = [] - - for i in hierarchy: - # Ignore Collections with nothing in them - if len(hierarchy[i]) != 0: - hierarchyByNum.append(len(hierarchy[i])) - else: - print(f"The following collection has been identified as empty: {i}") - - combinations = 1 - for i in hierarchyByNum: - combinations = combinations*i - - return combinations diff --git a/main/HeadlessUtil.py b/main/headless_util.py similarity index 88% rename from main/HeadlessUtil.py rename to main/headless_util.py index 6b09303..dbe0f6e 100644 --- a/main/HeadlessUtil.py +++ b/main/headless_util.py @@ -1,11 +1,12 @@ -#adding CLI arguments -#Used this as a basis: -#https://developer.blender.org/diffusion/B/browse/master/release/scripts/templates_py/background_job.py +# adding CLI arguments +# Used this as a basis: +# https://developer.blender.org/diffusion/B/browse/master/release/scripts/templates_py/background_job.py import sys import argparse -def getPythonArgs(): + +def get_python_args(): argv = sys.argv @@ -15,7 +16,7 @@ def getPythonArgs(): argv = argv[argv.index("--") + 1:] # get all args after "--" usage_text = ( - "Run Blend_My_NFTs headlessly from the command line\n" + "Run Blend_My_NFTs heedlessly from the command line\n" "usage:\n" "blender -background --python -- --config-file " ) @@ -64,4 +65,4 @@ def getPythonArgs(): help="Overwrite the logic file path in the config file" ) - return (parser.parse_args(argv), parser) \ No newline at end of file + return parser.parse_args(argv), parser diff --git a/main/helpers.py b/main/helpers.py new file mode 100644 index 0000000..d0839cb --- /dev/null +++ b/main/helpers.py @@ -0,0 +1,553 @@ +import bpy +import os +import sys +import json +import copy +import logging +import tempfile +import platform +import traceback +from time import sleep +from itertools import cycle +from threading import Thread +from shutil import get_terminal_size +from collections import Counter, defaultdict + +log = 
logging.getLogger(__name__) + + +# ======== CONSTANTS ======== # + +# Constants are used for storing or updating constant values that may need to be changes depending on system +# requirements and different use-cases. + +removeList = [".gitignore", ".DS_Store", "desktop.ini", ".ini"] + + +def remove_file_by_extension(dirlist): + """ + Checks if a given directory list contains any of the files or file extensions listed above, if so, remove them + from list and return a clean dir list. These files interfere with BMNFTs operations and should be removed + whenever dealing with directories. + """ + + if str(type(dirlist)) == "&lt;class 'str'&gt;": + dirlist = list(dirlist)  # converts single string path to list if dir pasted as string + + return_dirs = [] + for directory in dirlist: + if not str(os.path.split(directory)[1]) in removeList: + return_dirs.append(directory) + + return return_dirs + + +# TODO: fix colours in console logs and find a way to include coloured text in .txt file. + +class TextColors: + """ + The colour of console messages. + """ + + OK = '\033[92m'  # GREEN + WARNING = '\033[93m'  # YELLOW + ERROR = '\033[91m'  # RED + RESET = '\033[0m'  # RESET COLOR + + +def save_result(result): + """ + Saves json result to json file at the specified path. + """ + file_name = "log.json" + if platform.system() == "Linux" or platform.system() == "Darwin": + path = os.path.join(os.path.join(os.path.expanduser('~')), 'Desktop', file_name) + + if platform.system() == "Windows": + path = os.path.join(os.environ["HOMEPATH"], "Desktop", file_name) + + data = json.dumps(result, indent=1, ensure_ascii=True) + with open(path, 'w') as outfile: + outfile.write(data + '\n') + + +# ======== GET COMBINATIONS ======== # + +# This section retrieves the Scene hierarchy from the current Blender file. + + +def get_hierarchy(): + """ + Returns the hierarchy of a given Blender scene. 
+ """ + + coll = bpy.context.scene.collection + + script_ignore_collection = bpy.data.collections["Script_Ignore"] + + list_all_coll_in_scene = [] + list_all_collections = [] + + def traverse_tree(t): + yield t + for child in t.children: + yield from traverse_tree(child) + + for c in traverse_tree(coll): + list_all_coll_in_scene.append(c) + + for i in list_all_coll_in_scene: + list_all_collections.append(i.name) + + list_all_collections.remove(script_ignore_collection.name) + + if "Scene Collection" in list_all_collections: + list_all_collections.remove("Scene Collection") + + if "Master Collection" in list_all_collections: + list_all_collections.remove("Master Collection") + + def all_script_ignore(script_ignore_coll): + # Removes all collections, sub collections in Script_Ignore collection from list_all_collections. + + for collection in list(script_ignore_coll.children): + list_all_collections.remove(collection.name) + list_coll = list(collection.children) + if len(list_coll) > 0: + all_script_ignore(collection) + + all_script_ignore(script_ignore_collection) + list_all_collections.sort() + + exclude = ["_"] # Excluding characters that identify a Variant + attribute_collections = copy.deepcopy(list_all_collections) + + def filter_num(): + """ + This function removes items from 'attribute_collections' if they include values from the 'exclude' variable. + It removes child collections from the parent collections in from the "list_all_collections" list. 
+ """ + for x in attribute_collections: + if any(i in x for i in exclude): + attribute_collections.remove(x) + + for i in range(len(list_all_collections)): + filter_num() + + attribute_variants = [x for x in list_all_collections if x not in attribute_collections] + attribute_collections1 = copy.deepcopy(attribute_collections) + + def attribute_data(att_vars): + """ + Creates a dictionary of each attribute + """ + all_att_data_list = {} + for i in att_vars: + # Check if name follows naming conventions: + if int(i.count("_")) > 2 and int(i.split("_")[1]) > 0: + log.error( + f"\n{traceback.format_exc()}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"There is a naming issue with the following Attribute/Variant: '{i}'\n" + f"Review the naming convention of Attribute and Variant collections here:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + ) + raise Exception() + + try: + number = i.split("_")[1] + name = i.split("_")[0] + rarity = i.split("_")[2] + except IndexError: + log.error( + f"\n{traceback.format_exc()}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"There is a naming issue with the following Attribute/Variant: '{i}'\n" + f"Review the naming convention of Attribute and Variant collections here:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + ) + raise Exception() + + all_att_data_list[i] = {"name": name, "number": number, "rarity": rarity} + return all_att_data_list + + variant_meta_data = attribute_data(attribute_variants) + + hierarchy = {} + for i in attribute_collections1: + col_par_long = list(bpy.data.collections[str(i)].children) + col_par_short = {} + for x in col_par_long: + col_par_short[x.name] = None + hierarchy[i] = col_par_short + + for a in hierarchy: + for b in hierarchy[a]: + for x in variant_meta_data: + if str(x) == str(b): + (hierarchy[a])[b] = variant_meta_data[x] + + return hierarchy + + +# 
======== GET COMBINATIONS ======== # + +# This section is used to get the number of combinations for checks and the UI display + +def get_combinations(): + """ + Returns "combinations", the number of all possible NFT DNA for a given Blender scene formatted to BMNFTs conventions + combinations. + """ + + hierarchy = get_hierarchy() + hierarchy_by_num = [] + + for i in hierarchy: + # Ignore Collections with nothing in them + if len(hierarchy[i]) != 0: + hierarchy_by_num.append(len(hierarchy[i])) + else: + log.warning(f"\nThe following collection has been identified as empty: {i}") + + combinations = 1 + for i in hierarchy_by_num: + combinations = combinations * i + + return combinations + + +# ======== CHECKS ======== # + +# This section is used to check the NFTRecord.json for duplicate NFT DNA and returns any found in the console. +# It also checks the percentage each variant is chosen in the NFTRecord, then compares it with its rarity percentage +# set in the .blend file. + +# This section is provided for transparency. The accuracy of the rarity values you set in your .blend file as outlined +# in the README.md file are dependent on the maxNFTs, and the maximum number of combinations of your NFT collection. + +def check_scene(): # Not complete + """ + Checks if Blender file Scene follows the Blend_My_NFTs conventions. If not, raises error with all instances of + violations. 
+ """ + + script_ignore_exists = None # True if Script_Ignore collection exists in Blender scene + attribute_naming_conventions = None # True if all attributes in Blender scene follow BMNFTs naming conventions + variant_naming_conventions = None # True if all variants in Blender scene follow BMNFTs naming conventions + object_placing_conventions = None # True if all objects are within either Script_Ignore or a variant collection + + # script_ignore_exists: + try: + scriptIgnoreCollection = bpy.data.collections["Script_Ignore"] + script_ignore_exists = True + except KeyError: + log.error( + f"\n{traceback.format_exc()}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"Add a Script_Ignore collection to your Blender scene and ensure the name is exactly 'Script_Ignore'. " + f"For more information, see:" + f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{TextColors.RESET}" + ) + raise + + hierarchy = get_hierarchy() + collections = bpy.context.scene.collection + + # attribute_naming_conventions + + +def check_rarity(hierarchy, dna_list_formatted, save_path): + """Checks rarity percentage of each Variant, then sends it to RarityData.json in NFT_Data folder.""" + + dna_list = [list(i.keys())[0] for i in dna_list_formatted] + num_nfts_generated = len(dna_list) + num_dict = defaultdict(list) + hierarchy.keys() + + for i in dna_list: + dna_split_list = i.split("-") + + for j, k in zip(dna_split_list, hierarchy.keys()): + num_dict[k].append(j) + + num_dict = dict(num_dict) + + for i in num_dict: + count = dict(Counter(num_dict[i])) + num_dict[i] = count + + full_num_name = {} + + for i in hierarchy: + full_num_name[i] = {} + for j in hierarchy[i]: + variant_num = hierarchy[i][j]["number"] + + full_num_name[i][variant_num] = j + + complete_data = {} + + for i, j in zip(full_num_name, num_dict): + x = {} + for k in full_num_name[i]: + for l in num_dict[j]: + if l == k: + name = full_num_name[i][k] + num = num_dict[j][l] + x[name] = 
[(str(round(((num / num_nfts_generated) * 100), 2)) + "%"), str(num)] + + complete_data[i] = x + + # Saving Rarity data to console and log: + x = f"\nPercentages for each Variant per Attribute:" + for i in complete_data: + x += f"\n\n{i}:" + if complete_data[i]: + for j in complete_data[i]: + x += f"\n - {j}: {complete_data[i][j][0]} occurs {complete_data[i][j][1]} times." + else: + x += f"\n - Variants not selected." + + log.info(x) + + json_meta_data = json.dumps(complete_data, indent=1, ensure_ascii=True) + + with open(os.path.join(save_path, "RarityData.json"), 'w') as outfile: + outfile.write(json_meta_data + '\n') + path = os.path.join(save_path, "RarityData.json") + + log.info( + f"\nRarity data has been saved to:\n{path}" + ) + + +def check_duplicates(dna_list_formatted): + """Checks if there are duplicates in dna_list before NFTRecord.json is sent to JSON file.""" + dna_list = [] + for i in dna_list_formatted: + dna_list.append(list(i.keys())[0]) + + duplicates = 0 + seen = set() + + for x in dna_list: + if x in seen: + duplicates += 1 + seen.add(x) + + if duplicates > 0: + log.warning( + f"\n{TextColors.WARNING}Blend_My_NFTs Warning:\n" + f"{duplicates} duplicate NFT DNA was detected. This should not be possible. 
For more information, see:" + f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure" + f"\n{TextColors.RESET}" + ) + + log.info(f"\n\nDuplicate NFT DNA found: {duplicates}") + + +def check_failed_batches(batch_json_save_path): + fail_state = False + failed_batch = None + failed_dna = None + failed_dna_index = None + + if os.path.isdir(batch_json_save_path): + batch_folders = remove_file_by_extension(os.listdir(batch_json_save_path)) + + for i in batch_folders: + batch = json.load(open(os.path.join(batch_json_save_path, i))) + nfts_in_batch = batch["nfts_in_batch"] + if "Generation Save" in batch: + dna_generated = batch["Generation Save"][-1]["DNA Generated"] + if dna_generated is not None and dna_generated < nfts_in_batch: + fail_state = True + failed_batch = int(i.removeprefix("Batch").removesuffix(".json")) + failed_dna = dna_generated + + return fail_state, failed_batch, failed_dna, failed_dna_index + + +# Raise Errors: +def raise_error_num_batches(max_nfts, nfts_per_batch): + """Checks if number of Batches is less than maxNFTs, if not raises error.""" + + try: + num_batches = max_nfts / nfts_per_batch + return num_batches + except ZeroDivisionError: + log.error( + f"\n{traceback.format_exc()}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"The number of NFTs per Batch must be greater than ZERO." + f"Please review your Blender scene and ensure it follows " + f"the naming conventions and scene structure. For more information, " + f"see:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure" + f"\n{TextColors.RESET}" + ) + raise ZeroDivisionError() + + +def raise_error_zero_combinations(): + """Checks if combinations is greater than 0, if so, raises error.""" + if get_combinations() == 0: + log.error( + f"\n{traceback.format_exc()}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"The number of all possible combinations is ZERO. 
Please review your Blender scene and ensure it " + f"follows the naming conventions and scene structure. For more information, see:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure" + f"\n{TextColors.RESET}" + ) + + raise ValueError() + + +def raise_error_num_batches_greater_then(num_batches): + if num_batches < 1: + log.error( + f"\n{traceback.format_exc()}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"The number of Batches is less than 1. Please review your Blender scene and ensure it follows " + f"the naming conventions and scene structure. For more information, " + f"see:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure" + f"\n{TextColors.RESET}" + ) + raise ValueError() + + +# Raise Warnings: +def raise_warning_max_nfts(nfts_per_batch, collection_size): + """ + Prints warning if nfts_per_batch is greater than collection_size. + """ + + if nfts_per_batch > collection_size: + log.error( + f"\n{traceback.format_exc()}" + f"\n{TextColors.WARNING}Blend_My_NFTs Warning:\n" + f"The number of NFTs Per Batch you set is smaller than the NFT Collection Size you set." + f"\n{TextColors.RESET}" + ) + + raise ValueError() + + +def raise_warning_collection_size(dna_list, collection_size): + """ + Prints warning if BMNFTs cannot generate requested number of NFTs from a given collection_size. + """ + + if len(dna_list) < collection_size: + log.warning( + f"\n{traceback.format_exc()}" + f"\n{TextColors.WARNING} \nWARNING: \n" + f"Blend_My_NFTs cannot generate {collection_size} NFTs." + f" Only {len(dna_list)} NFT DNA were generated." 
+ + f"\nThis might be for a number of reasons:" + f"\n a) Rarity is preventing combinations from being generated (See " + f"https://github.com/torrinworx/Blend_My_NFTs#notes-on-rarity-and-weighted-variants).\n " + f"\n b) Logic is preventing combinations from being generated (See " + f"https://github.com/torrinworx/Blend_My_NFTs#logic).\n " + f"\n c) The number of possible combinations of your NFT collection is too low. Add more Variants or " + f"Attributes to increase the recommended collection size.\n " + f"\n{TextColors.RESET}" + ) + + +# ======== LOADING ANIMATION ======== # + +# This section is used for the loading animation used in the system console. + +class Loader: + def __init__(self, desc="Loading...", end="Done!", timeout=0.1): + """ + A loader-like context manager + + Args: + desc (str, optional): The loader's description. Defaults to "Loading...". + end (str, optional): Final print. Defaults to "Done!". + timeout (float, optional): Sleep time between prints. Defaults to 0.1. 
+ """ + self.desc = desc + self.end = end + self.timeout = timeout + + self._thread = Thread(target=self._animate, daemon=True) + self.steps = [ + " [== ]", + " [ == ]", + " [ == ]", + " [ == ]", + " [ == ]", + " [ ==]", + " [ == ]", + " [ == ]", + " [ == ]", + " [ == ]", + ] + self.done = False + + def start(self): + self._thread.start() + return self + + def _animate(self): + for c in cycle(self.steps): + if self.done: + break + print(f"\r{self.desc} {c}", flush=True, end="") + sleep(self.timeout) + + def __enter__(self): + self.start() + + def stop(self): + self.done = True + cols = get_terminal_size((80, 20)).columns + print("\r" + " " * cols, end="", flush=True) + print(f"\r{self.end}", flush=True) + + def __exit__(self, exc_type, exc_value, tb): + # handle exceptions with those variables ^ + self.stop() + + +def activate_logging(): + """ + Used as an intermediate activated at runtime of the following operators: CreateData, ExportNFTs, ResumeFailedBatch, + RefactorBatches, and ExportSettings. Must be independent of 'input' class to be safe, gets variables directly from + bpy. 
+ """ + + log_path = bpy.context.scene.input_tool.log_path + if log_path: + file_handler = logging.FileHandler(os.path.join(log_path, 'BMNFTs_Log.txt'), 'a') + else: + file_handler = logging.FileHandler(os.path.join(tempfile.gettempdir(), 'BMNFTs_Log.txt'), 'a') + + formatter = logging.Formatter( + '[%(asctime)s] [%(levelname)s] [%(filename)s > %(funcName)s() > Line:%(lineno)d]\n%(message)s\n' + ) + file_handler.setFormatter(formatter) + + log = logging.getLogger() + for handler in log.handlers[:]: + if isinstance(handler, logging.FileHandler): + log.removeHandler(handler) + if isinstance(handler, logging.StreamHandler): + log.removeHandler(handler) + log.addHandler(file_handler) + + # Record log to console: + console_handler = logging.StreamHandler(sys.stdout) + log.addHandler(console_handler) + + if bpy.context.scene.input_tool.enable_debug: + logging.getLogger().setLevel(logging.DEBUG) + else: + logging.getLogger().setLevel(logging.INFO) diff --git a/main/intermediate.py b/main/intermediate.py new file mode 100644 index 0000000..e44ffc7 --- /dev/null +++ b/main/intermediate.py @@ -0,0 +1,95 @@ +import logging + +import bpy +import json + +from main import dna_generator, exporter + +log = logging.getLogger(__name__) + +# TODO: migrate this code to the dna_generator.py(send_to_record) and exporter.py(render_and_save) to simplify render +# process into one file. + + +def send_to_record(input, reverse_order=False): + if input.enable_logic: + if input.enable_logic_json and input.logic_file: + input.logic_file = json.load(open(input.logic_file)) + + if input.enable_logic_json and not input.logic_file: + log.error( + f"No Logic.json file path set. Please set the file path to your Logic.json file." 
+ ) + raise + + if not input.enable_logic_json: + scn = bpy.context.scene + if reverse_order: + input.logic_file = {} + num = 1 + for i in range(scn.logic_fields_index, -1, -1): + item = scn.logic_fields[i] + + item_list1 = item.item_list1 + rule_type = item.rule_type + item_list2 = item.item_list2 + input.logic_file[f"Rule-{num}"] = { + "IF": item_list1.split(','), + rule_type: item_list2.split(',') + } + num += 1 + else: + input.logic_file = {} + num = 1 + for item in scn.logic_fields: + item_list1 = item.item_list1 + rule_type = item.rule_type + item_list2 = item.item_list2 + input.logic_file[f"Rule-{num}"] = { + "IF": item_list1.split(','), + rule_type: item_list2.split(',') + } + num += 1 + + dna_generator.send_to_record( + input.collection_size, + input.nfts_per_batch, + input.save_path, + input.enable_rarity, + input.enable_logic, + input.logic_file, + input.enable_materials, + input.materials_file, + input.blend_my_nfts_output, + input.batch_json_save_path, + input.enable_debug, + input.log_path + ) + + +def render_and_save_nfts(input, reverse_order=False): + if input.enable_custom_fields: + scn = bpy.context.scene + if reverse_order: + for i in range(scn.custom_metadata_fields_index, -1, -1): + item = scn.custom_metadata_fields[i] + if item.field_name in list(input.custom_fields.keys()): + log.error( + f"A duplicate of '{item.field_name}' was found. Ensure all Custom Metadata field " + f"Names are unique." + ) + raise ValueError() + else: + input.custom_fields[item.field_name] = item.field_value + else: + for item in scn.custom_metadata_fields: + if item.field_name in list(input.custom_fields.keys()): + log.error( + f"A duplicate of '{item.field_name}' was found. Ensure all Custom Metadata field " + f"Names are unique." 
+ ) + raise ValueError() + else: + input.custom_fields[item.field_name] = item.field_value + + exporter.render_and_save_nfts(input) diff --git a/main/loading_animation.py b/main/loading_animation.py deleted file mode 100644 index d97e69d..0000000 --- a/main/loading_animation.py +++ /dev/null @@ -1,69 +0,0 @@ -from itertools import cycle -from shutil import get_terminal_size -from threading import Thread -from time import sleep - - -class Loader: - def __init__(self, desc="Loading...", end="Done!", timeout=0.1): - """ - A loader-like context manager - - Args: - desc (str, optional): The loader's description. Defaults to "Loading...". - end (str, optional): Final print. Defaults to "Done!". - timeout (float, optional): Sleep time between prints. Defaults to 0.1. - """ - self.desc = desc - self.end = end - self.timeout = timeout - - self._thread = Thread(target=self._animate, daemon=True) - self.steps = [ - " [== ]", - " [ == ]", - " [ == ]", - " [ == ]", - " [ == ]", - " [ ==]", - " [ == ]", - " [ == ]", - " [ == ]", - " [ == ]", - ] - self.done = False - - def start(self): - self._thread.start() - return self - - def _animate(self): - for c in cycle(self.steps): - if self.done: - break - print(f"\r{self.desc} {c}", flush=True, end="") - sleep(self.timeout) - - def __enter__(self): - self.start() - - def stop(self): - self.done = True - cols = get_terminal_size((80, 20)).columns - print("\r" + " " * cols, end="", flush=True) - print(f"\r{self.end}", flush=True) - - def __exit__(self, exc_type, exc_value, tb): - # handle exceptions with those variables ^ - self.stop() - - -if __name__ == "__main__": - with Loader("Loading with context manager..."): - for i in range(10): - sleep(0.25) - - loader = Loader("Loading with object...", "That was fast!", 0.05).start() - for i in range(10): - sleep(0.25) - loader.stop() diff --git a/main/Logic.py b/main/logic.py similarity index 57% rename from main/Logic.py rename to main/logic.py index d425c70..17331f2 100644 --- 
a/main/Logic.py +++ b/main/logic.py @@ -1,19 +1,23 @@ # Purpose: -# The purpose of this file is to add logic and rules to the DNA that are sent to the NFTRecord.json file in DNA_Generator.py +# The purpose of this file is to add logic and rules to the DNA that are sent to the NFTRecord.json file in +# dna_generator.py -import bpy import random +import logging +import traceback import collections -from .Constants import bcolors, removeList, remove_file_by_extension, save_result +from .helpers import TextColors + +log = logging.getLogger(__name__) -def reconstructDNA(deconstructedDNA): - reconstructed_DNA = "" - for a in deconstructedDNA: +def reconstruct_dna(deconstructed_dna): + reconstructed_dna = "" + for a in deconstructed_dna: num = "-" + str(a) - reconstructed_DNA += num - return ''.join(reconstructed_DNA.split('-', 1)) + reconstructed_dna += num + return ''.join(reconstructed_dna.split('-', 1)) def get_var_info(variant, hierarchy): @@ -33,11 +37,11 @@ def get_var_info(variant, hierarchy): return [name, order_number, rarity_number, attribute, attribute_index] # list of Var info sent back -def apply_rules_to_dna(hierarchy, deconstructed_DNA, if_dict, result_dict, result_dict_type, enableRarity): - # Check if Variants in if_dict are in deconstructed_DNA, if so return if_list_selected = True: +def apply_rules_to_dna(hierarchy, deconstructed_dna, if_dict, result_dict, result_dict_type, enable_rarity): + # Check if Variants in if_dict are in deconstructed_dna, if so return if_list_selected = True: if_list_selected = False - for a in deconstructed_DNA: - attribute_index = deconstructed_DNA.index(a) + for a in deconstructed_dna: + attribute_index = deconstructed_dna.index(a) attribute = list(hierarchy.keys())[attribute_index] for b in hierarchy[attribute]: @@ -49,23 +53,23 @@ def apply_rules_to_dna(hierarchy, deconstructed_DNA, if_dict, result_dict, resul if_list_selected = True # Apply changes in accordance to Variants in 'result_dict' and 'if_list_selected' bool 
above: - for a in deconstructed_DNA: - attribute_index = deconstructed_DNA.index(a) + for a in deconstructed_dna: + attribute_index = deconstructed_dna.index(a) attribute = list(hierarchy.keys())[attribute_index] if attribute in result_dict: # Check if Attribute from DNA is in 'result_dict' # If 'a' is a full Attribute and Variants in if_dict not selected, set 'a' to empty (0): if list(result_dict[attribute].keys()) == list(hierarchy[attribute].keys()) and not if_list_selected: - deconstructed_DNA[attribute_index] = "0" + deconstructed_dna[attribute_index] = "0" # If 'a' is a full Attribute and result_dict_type = "NOT", set 'a' to empty (0): if list(result_dict[attribute].keys()) == list( hierarchy[attribute].keys()) and if_list_selected and result_dict_type == "NOT": - deconstructed_DNA[attribute_index] = "0" + deconstructed_dna[attribute_index] = "0" - # If Variants in if_dict are selected, set each attribute in 'result_dict' to a random or rarity selected Variant from - # 'result_dict[attribute]' variant_list: + # If Variants in if_dict are selected, set each attribute in 'result_dict' to a random or rarity selected Variant + # from 'result_dict[attribute]' variant_list: if if_list_selected: # Invert 'items_returned' if 'NOT' rule is selected: @@ -91,60 +95,64 @@ def apply_rules_to_dna(hierarchy, deconstructed_DNA, if_dict, result_dict, resul if attribute in result_dict: # Check if Attribute from DNA is in 'then_dict' - number_List_Of_i = [] - rarity_List_Of_i = [] - ifZeroBool = None - variantNum = None + number_list_of_i = [] + rarity_list_of_i = [] + if_zero_bool = None + variant_num = None for b in variant_list: number = b.split("_")[1] rarity = b.split("_")[2] - number_List_Of_i.append(int(number)) - rarity_List_Of_i.append(float(rarity)) + number_list_of_i.append(int(number)) + rarity_list_of_i.append(float(rarity)) - for b in rarity_List_Of_i: + for b in rarity_list_of_i: if b == 0: - ifZeroBool = True + if_zero_bool = True elif b != 0: - ifZeroBool = 
False + if_zero_bool = False - if enableRarity: + if enable_rarity: try: - if ifZeroBool: - variantNum = random.choices(number_List_Of_i, k=1) - elif not ifZeroBool: - variantNum = random.choices(number_List_Of_i, weights=rarity_List_Of_i, k=1) + if if_zero_bool: + variant_num = random.choices(number_list_of_i, k=1) + elif not if_zero_bool: + variant_num = random.choices(number_list_of_i, weights=rarity_list_of_i, k=1) except IndexError: - raise IndexError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"An issue was found within the Attribute collection '{a}'. For more information on Blend_My_NFTs compatible scenes, " - f"see:\n{bcolors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + log.error( + f"\n{traceback.format_exc()}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"An issue was found within the Attribute collection '{a}'. For more information on " + f"Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) + raise IndexError() else: try: - variantNum = random.choices(number_List_Of_i, k=1) + variant_num = random.choices(number_list_of_i, k=1) except IndexError: - raise IndexError( - f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n" - f"An issue was found within the Attribute collection '{a}'. For more information on Blend_My_NFTs compatible scenes, " - f"see:\n{bcolors.RESET}" - f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + log.error( + f"\n{traceback.format_exc()}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"An issue was found within the Attribute collection '{a}'. 
For more information on " + f"Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" ) - deconstructed_DNA[int(attribute_index)] = str(variantNum[0]) + raise IndexError() + deconstructed_dna[int(attribute_index)] = str(variant_num[0]) - return deconstructed_DNA + return deconstructed_dna -def get_rule_break_type(hierarchy, deconstructed_DNA, if_dict, result_dict, result_dict_type): - # Check if Variants in 'if_dict' found in deconstructed_DNA: - if_bool = False # True if Variant in 'deconstructed_DNA' found in 'if_dict' +def get_rule_break_type(hierarchy, deconstructed_dna, if_dict, result_dict, result_dict_type): + # Check if Variants in 'if_dict' found in deconstructed_dna: + if_bool = False # True if Variant in 'deconstructed_dna' found in 'if_dict' for a in if_dict: # Attribute in 'if_dict' for b in if_dict[a]: # Variant in if_dict[Attribute] var_order_num = str(if_dict[a][b][1]) # Order number of 'b' (Variant) dna_order_num = str( - deconstructed_DNA[if_dict[a][b][4]]) # Order Number of 'b's attribute in deconstructed_DNA + deconstructed_dna[if_dict[a][b][4]]) # Order Number of 'b's attribute in deconstructed_dna if var_order_num == dna_order_num: # If DNA selected Variants found inside IF list variants: if_bool = True @@ -153,14 +161,14 @@ def get_rule_break_type(hierarchy, deconstructed_DNA, if_dict, result_dict, resu continue break - # Check if Variants in 'result_dict' found in deconstructed_DNA: + # Check if Variants in 'result_dict' found in deconstructed_dna: full_att_bool = False - result_bool = False # True if Variant in 'deconstructed_DNA' found in 'result_dict' + result_bool = False # True if Variant in 'deconstructed_dna' found in 'result_dict' for a in result_dict: # Attribute in 'result_dict' for b in result_dict[a]: # Variant in if_dict[Attribute] var_order_num = str(result_dict[a][b][1]) # Order number of 'b' (Variant) dna_order_num = str( - 
deconstructed_DNA[result_dict[a][b][4]]) # Order Number of 'b's attribute in deconstructed_DNA + deconstructed_dna[result_dict[a][b][4]]) # Order Number of 'b's attribute in deconstructed_dna if var_order_num == dna_order_num: # If DNA selected Variants found inside THEN list variants: if list(result_dict[a].keys()) == list(hierarchy[a].keys()): full_att_bool = True @@ -173,20 +181,20 @@ def get_rule_break_type(hierarchy, deconstructed_DNA, if_dict, result_dict, resu # Rule Bool return summary: violates_rule = False - # If Variants in 'if_dict' found in deconstructed_DNA and Variants in 'result_dict' not found in deconstructed_DNA: + # If Variants in 'if_dict' found in deconstructed_dna and Variants in 'result_dict' not found in deconstructed_dna: if if_bool and not result_bool: violates_rule = True elif if_bool and result_bool and result_dict_type == "NOT": violates_rule = True - # If Variants in 'if_dict' not found in deconstructed_DNA, and 'result_dict' variants are found in deconstructed_DNA, - # and they are a part of a full Attribute in 'then_dict' + # If Variants in 'if_dict' not found in deconstructed_dna, and 'result_dict' variants are found in + # deconstructed_dna, and they are a part of a full Attribute in 'then_dict' elif not if_bool and result_bool and full_att_bool: violates_rule = True - # If Variants in 'if_dict' not found in deconstructed_DNA, but Variants in 'then_dict' are found in deconstructed_DNA, - # and don't make up a full Attribute: + # If Variants in 'if_dict' not found in deconstructed_dna, but Variants in 'then_dict' are found in + # deconstructed_dna, and don't make up a full Attribute: # elif not if_bool and result_bool and not full_att_bool: # violates_rule = False @@ -194,7 +202,9 @@ def get_rule_break_type(hierarchy, deconstructed_DNA, if_dict, result_dict, resu def create_dicts(hierarchy, rule_list_items, result_dict_type): - # Example of output structure: + """ + Example of output structure: + structure = { "attribute1": { 
"variant1": [ @@ -229,6 +239,7 @@ def create_dicts(hierarchy, rule_list_items, result_dict_type): ] } } + """ items_returned = collections.defaultdict(dict) for a in rule_list_items: @@ -249,41 +260,50 @@ def create_dicts(hierarchy, rule_list_items, result_dict_type): return dict(items_returned) -def logicafyDNAsingle(hierarchy, singleDNA, logicFile, enableRarity, enableMaterials): - deconstructed_DNA = singleDNA.split("-") - didReconstruct = True - originalDNA = str(singleDNA) +def logicafy_dna_single(hierarchy, single_dna, logic_file, enable_rarity): + deconstructed_dna = single_dna.split("-") + did_reconstruct = True + original_dna = str(single_dna) - while didReconstruct: - didReconstruct = False - for rule in logicFile: + while did_reconstruct: + did_reconstruct = False + for rule in logic_file: # Items from 'IF' key for a given rule - if_dict = create_dicts(hierarchy, logicFile[rule]["IF"], "IF") + if_dict = create_dicts(hierarchy, logic_file[rule]["IF"], "IF") result_dict_type = "" - if "THEN" in logicFile[rule]: + if "THEN" in logic_file[rule]: result_dict_type = "THEN" - if "NOT" in logicFile[rule]: + if "NOT" in logic_file[rule]: result_dict_type = "NOT" - result_dict = create_dicts(hierarchy, logicFile[rule][result_dict_type], result_dict_type) + result_dict = create_dicts(hierarchy, logic_file[rule][result_dict_type], result_dict_type) # Change 'then_bool' to 'result_bool' - violates_rule, if_bool, then_bool, full_att_bool = get_rule_break_type(hierarchy, deconstructed_DNA, - if_dict, result_dict, - result_dict_type) + violates_rule, if_bool, then_bool, full_att_bool = get_rule_break_type( + hierarchy, + deconstructed_dna, + if_dict, + result_dict, + result_dict_type, + ) if violates_rule: - # print(f"======={deconstructed_DNA} VIOLATES RULE======") + log.debug(f"======={deconstructed_dna} VIOLATES RULE======") - deconstructed_DNA = apply_rules_to_dna( - hierarchy, deconstructed_DNA, if_dict, result_dict, result_dict_type, enableRarity + 
deconstructed_dna = apply_rules_to_dna( + hierarchy, + deconstructed_dna, + if_dict, + result_dict, + result_dict_type, + enable_rarity ) - newDNA = reconstructDNA(deconstructed_DNA) - if newDNA != originalDNA: - originalDNA = str(newDNA) - didReconstruct = True + new_dna = reconstruct_dna(deconstructed_dna) + if new_dna != original_dna: + original_dna = str(new_dna) + did_reconstruct = True break - return str(reconstructDNA(deconstructed_DNA)) + return str(reconstruct_dna(deconstructed_dna)) diff --git a/main/material_generator.py b/main/material_generator.py new file mode 100644 index 0000000..9a6374f --- /dev/null +++ b/main/material_generator.py @@ -0,0 +1,146 @@ +# Purpose: +# The purpose of this file is to apply the materials a user sets in a given .json file to the Variant collection objects +# also specified in the .json file. The Materialized DNA is then returned in the following format: 1-1-1:1-1-1 +# Where the numbers right of the ":" are the material numbers applied to the respective Variants to the left of the ":" + +import json +import random +import logging +import traceback +from .helpers import TextColors + +log = logging.getLogger(__name__) + + +def select_material(material_list, variant, enable_rarity): + """Selects a material from a passed material list. """ + material_list_of_i = [] # List of Material names instead of order numbers + rarity_list_of_i = [] + if_zero_bool = None + + for material in material_list: + # Material Order Number comes from index in the Material List in materials.json for a given Variant. 
+ # material_order_num = list(material_list.keys()).index(material) + + material_list_of_i.append(material) + + material_rarity_percent = material_list[material] + rarity_list_of_i.append(float(material_rarity_percent)) + + # print(f"MATERIAL_LIST_OF_I:{material_list_of_i}") + # print(f"RARITY_LIST_OF_I:{rarity_list_of_i}") + + for b in rarity_list_of_i: + if b == 0: + if_zero_bool = True + elif b != 0: + if_zero_bool = False + + if enable_rarity: + try: + if if_zero_bool: + selected_material = random.choices(material_list_of_i, k=1) + elif not if_zero_bool: + selected_material = random.choices(material_list_of_i, weights=rarity_list_of_i, k=1) + except IndexError: + log.error( + f"\n{traceback.format_exc()}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"An issue was found within the Material List of the Variant collection '{variant}'. For more " + f"information on Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + ) + raise IndexError() + else: + try: + selected_material = random.choices(material_list_of_i, k=1) + except IndexError: + log.error( + f"\n{traceback.format_exc()}" + f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n" + f"An issue was found within the Material List of the Variant collection '{variant}'. 
For more " + f"information on Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}" + f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n" + ) + raise IndexError() + + return selected_material[0], material_list + + +def get_variant_att_index(variant, hierarchy): + variant_attribute = None + + for attribute in hierarchy: + for variant_h in hierarchy[attribute]: + if variant_h == variant: + variant_attribute = attribute + + attribute_index = list(hierarchy.keys()).index(variant_attribute) + variant_order_num = variant.split("_")[1] + return attribute_index, variant_order_num + + +def match_dna_to_variant(hierarchy, single_dna): + """ + Matches each DNA number separated by "-" to its attribute, then its variant. + """ + + list_attributes = list(hierarchy.keys()) + list_dna_decunstructed = single_dna.split('-') + dna_dictionary = {} + + for i, j in zip(list_attributes, list_dna_decunstructed): + dna_dictionary[i] = j + + for x in dna_dictionary: + for k in hierarchy[x]: + k_num = hierarchy[x][k]["number"] + if k_num == dna_dictionary[x]: + dna_dictionary.update({x: k}) + return dna_dictionary + + +def apply_materials(hierarchy, single_dna, materials_file, enable_rarity): + """ + DNA with applied material example: "1-1:1-1" : + + The Material DNA will select the material for the Variant order number in the NFT DNA based on the Variant Material + list in the Variant_Material.json file. 
+ """ + + single_dna_dict = match_dna_to_variant(hierarchy, single_dna) + materials_file = json.load(open(materials_file)) + deconstructed_material_dna = {} + + for a in single_dna_dict: + complete = False + for b in materials_file: + if single_dna_dict[a] == b: + material_name, material_list, = select_material(materials_file[b]['Material List'], b, enable_rarity) + + # Gets the Order Number of the Material + material_order_num = list(material_list.keys()).index(material_name) + + deconstructed_material_dna[a] = str(material_order_num + 1) + complete = True + if not complete: + deconstructed_material_dna[a] = "0" + + # This section is now incorrect and needs updating: + + # Make Attributes have the same materials: + # Order your Attributes alphabetically, then assign each Attribute a number, starting with 0. So Attribute 'A' = 0, + # Attribute 'B' = 1, 'C' = 2, 'D' = 3, etc. For each pair you want to equal another, add its number it to this list: + # synced_material_attributes = [1, 2] + # + # first_mat = deconstructed_material_dna[synced_material_attributes[0]] + # for i in synced_material_attributes: + # deconstructed_material_dna[i] = first_mat + + material_dna = "" + for a in deconstructed_material_dna: + num = "-" + str(deconstructed_material_dna[a]) + material_dna += num + material_dna = ''.join(material_dna.split('-', 1)) + + return f"{single_dna}:{material_dna}" diff --git a/main/metadata_templates.py b/main/metadata_templates.py new file mode 100644 index 0000000..1aa6210 --- /dev/null +++ b/main/metadata_templates.py @@ -0,0 +1,191 @@ +# Some code in this file was generously sponsored by the amazing team over at SolSweepers! +# Feel free to check out their amazing project and see how they are using Blend_My_NFTs: +# https://discord.gg/QTT7dzcuVs + +# Purpose: +# This file returns the specified metadata format to the exporter.py for a given NFT DNA. 
+ +import os +import json + + +def send_metadata_to_json(meta_data_dict, save_path, file_name): + json_metadata = json.dumps(meta_data_dict, indent=1, ensure_ascii=True) + with open(os.path.join(save_path, f"{file_name}.json"), 'w') as outfile: + outfile.write(json_metadata + '\n') + + +def strip_nums(variant): + variant = str(variant).split('_')[0] + return variant + + +# Cardano Template +def create_cardano_metadata( + name, + order_num, + nft_dna, + nft_variants, + material_attributes, + custom_fields, + enable_custom_fields, + cardano_description, + cardano_metadata_path +): + + meta_data_dict_cardano = {"721": { + "": { + name: { + "name": name, + "image": "", + "mediaType": "", + "description": cardano_description, + } + }, + "version": "1.0" + }} + + # Variants and Attributes: + for i in nft_variants: + meta_data_dict_cardano["721"][""][name][i] = strip_nums(nft_variants[i]) + + # Material Variants and Attributes: + for i in material_attributes: + meta_data_dict_cardano["721"][""][name][i] = material_attributes[i] + + # Custom Fields: + if enable_custom_fields: + for i in custom_fields: + meta_data_dict_cardano["721"][""][name][i] = custom_fields[i] + + send_metadata_to_json( + meta_data_dict_cardano, + cardano_metadata_path, + name + ) + + +# Solana Template +def createSolanaMetaData( + name, + order_num, + nft_dna, + nft_variants, + material_attributes, + custom_fields, + enable_custom_fields, + solana_description, + solana_metadata_path +): + metadata_dict_solana = { + "name": name, + "symbol": "", + "description": solana_description, + "seller_fee_basis_points": None, + "image": "", + "animation_url": "", + "external_url": "" + } + + attributes = [] + + # Variant and Attributes: + for i in nft_variants: + dictionary = { + "trait_type": i, + "value": strip_nums(nft_variants[i]) + } + attributes.append(dictionary) + + # Material Variants and Attributes: + for i in material_attributes: + dictionary = { + "trait_type": i, + "value": material_attributes[i] + 
} + attributes.append(dictionary) + + # Custom Fields: + if enable_custom_fields: + for i in custom_fields: + dictionary = { + "trait_type": i, + "value": custom_fields[i] + } + attributes.append(dictionary) + + metadata_dict_solana["attributes"] = attributes + metadata_dict_solana["collection"] = { + "name": "", + "family": "" + } + + metadata_dict_solana["properties"] = { + "files": [{"uri": "", "type": ""}], + "category": "", + "creators": [{"address": "", "share": None}] + } + + send_metadata_to_json( + metadata_dict_solana, + solana_metadata_path, + name + ) + + +# ERC721 Template +def create_erc721_meta_data( + name, + order_num, + nft_dna, + nft_variants, + material_attributes, + custom_fields, + enable_custom_fields, + erc721_description, + erc721_metadata_path +): + + metadata_dict_erc721 = { + "name": name, + "description": erc721_description, + "image": "", + "attributes": None, + } + + attributes = [] + + # Variants and Attributes: + for i in nft_variants: + dictionary = { + "trait_type": i, + "value": strip_nums(nft_variants[i]) + } + + attributes.append(dictionary) + + # Material Variants and Attributes: + for i in material_attributes: + dictionary = { + "trait_type": i, + "value": material_attributes[i] + } + + attributes.append(dictionary) + + # Custom Fields: + if enable_custom_fields: + for i in custom_fields: + dictionary = { + "trait_type": i, + "value": custom_fields[i] + } + attributes.append(dictionary) + + metadata_dict_erc721["attributes"] = attributes + + send_metadata_to_json( + metadata_dict_erc721, + erc721_metadata_path, + name + ) diff --git a/main/refactorer.py b/main/refactorer.py new file mode 100644 index 0000000..41350b7 --- /dev/null +++ b/main/refactorer.py @@ -0,0 +1,49 @@ +# Purpose: +# This file goes through all batches, renames, and sorts all nft files to a Complete_Collection folder in Blend_My_NFTs + +import os +import json +import shutil +import logging + +from .helpers import remove_file_by_extension + +log = 
logging.getLogger(__name__) + + +def reformat_nft_collection(refactor_panel_input): + complete_coll_path = os.path.join(refactor_panel_input.save_path, "Blend_My_NFTs Output", "Complete_Collection") + + if not os.path.exists(complete_coll_path): + os.mkdir(complete_coll_path) + + batch_list_dirty = os.listdir(refactor_panel_input.nft_batch_save_path) + batch_list = remove_file_by_extension(batch_list_dirty) + collection_info = {"Total Time": 0} + + for folder in batch_list: + batch_info = json.load(open(os.path.join(refactor_panel_input.nft_batch_save_path, folder, "batch_info.json"))) + collection_info[os.path.basename(folder)] = batch_info + collection_info["Total Time"] = collection_info["Total Time"] + batch_info["Batch Render Time"] + + file_list_dirty = os.listdir(os.path.join(refactor_panel_input.nft_batch_save_path, folder)) + filelist = remove_file_by_extension(file_list_dirty) + + for mediaTypeFolder in filelist: + if mediaTypeFolder != "batch_info.json": + media_type_folder_dir = os.path.join(refactor_panel_input.nft_batch_save_path, folder, mediaTypeFolder) + + for i in os.listdir(media_type_folder_dir): + destination = os.path.join(complete_coll_path, mediaTypeFolder) + if not os.path.exists(destination): + os.makedirs(destination) + + shutil.move(os.path.join(media_type_folder_dir, i), destination) + + collection_info = json.dumps(collection_info, indent=1, ensure_ascii=True) + with open(os.path.join(complete_coll_path, "collection_info.json"), 'w') as outfile: + outfile.write(collection_info + '\n') + + log.info(f"All NFT files stored and sorted to the Complete_Collection folder in {refactor_panel_input.save_path}") + + shutil.rmtree(refactor_panel_input.nft_batch_save_path)