diff --git a/__init__.py b/__init__.py
index 5655103..e4ee8c0 100644
--- a/__init__.py
+++ b/__init__.py
@@ -17,15 +17,17 @@ LAST_UPDATED = "01:02PM, Aug 24th, 2022"
 
 # ======== Import handling ======== #
 
+# Blender modules:
 import bpy
 from bpy.app.handlers import persistent
-from bpy.props import (IntProperty,
-                       BoolProperty,
-                       CollectionProperty)
+from bpy.props import (IntProperty, BoolProperty, CollectionProperty)
+
 # Python modules:
 import os
 import sys
 import json
+import logging
+import tempfile
 import importlib
 import traceback
 from typing import Any
@@ -35,7 +37,7 @@ from datetime import datetime, timezone
 # "a little hacky bs" - matt159 ;)
 sys.path.append(os.path.dirname(os.path.realpath(__file__)))
 
-# Local file imports:
+# Local modules:
 from main import \
     helpers, \
     dna_generator, \
@@ -51,6 +53,7 @@ from UILists import \
     custom_metadata_ui_list, \
     logic_ui_list
 
+# Refresh Locals for development:
 if "bpy" in locals():
     modules = {
         "helpers": helpers,
@@ -79,7 +82,7 @@ dt = datetime.now(timezone.utc).astimezone()  # Date Time in UTC local
 
 
 @persistent
-def Refresh_UI(dummy1, dummy2):
+def refresh_ui(dummy1, dummy2):
     """
     Refreshes the UI upon user interacting with Blender (using depsgraph_update_post handler). Might be a better
     handler to use.
@@ -106,7 +109,7 @@ def Refresh_UI(dummy1, dummy2):
     redraw_panel(refresh_panel_classes)
 
 
-bpy.app.handlers.depsgraph_update_post.append(Refresh_UI)
+bpy.app.handlers.depsgraph_update_post.append(refresh_ui)
 
 
 # ======== Defining BMNFTs Data ======== #
@@ -164,6 +167,8 @@ class BMNFTData:
     enable_debug: bool
     log_path: str
 
+    enable_dry_run: str
+
     custom_fields: dict = None
     fail_state: Any = False
     failed_batch: Any = None
@@ -174,13 +179,14 @@ class BMNFTData:
         self.custom_fields = {}
 
 
-def getBMNFTData():
+def get_bmnft_data():
     _save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path)
     _Blend_My_NFTs_Output, _batch_json_save_path, _nftBatch_save_path = make_directories(_save_path)
 
+    # IMPORTANT: if a new directory variable is ever added, use 'bpy.path.abspath' instead of 'os.path.abspath'.
     data = BMNFTData(
         nft_name=bpy.context.scene.input_tool.nft_name,
-        save_path=_save_path,
+        save_path=bpy.path.abspath(_save_path),  # Converting from Blender's relative path system to absolute.
         nfts_per_batch=bpy.context.scene.input_tool.nfts_per_batch,
         batch_to_generate=bpy.context.scene.input_tool.batch_to_generate,
         collection_size=bpy.context.scene.input_tool.collection_size,
@@ -193,7 +199,7 @@ def getBMNFTData():
 
         enable_logic=bpy.context.scene.input_tool.enable_logic,
         enable_logic_json=bpy.context.scene.input_tool.enable_logic_json,
-        logic_file=bpy.context.scene.input_tool.logic_file,
+        logic_file=bpy.path.abspath(bpy.context.scene.input_tool.logic_file),
 
         enable_images=bpy.context.scene.input_tool.image_bool,
         image_file_format=bpy.context.scene.input_tool.image_enum,
@@ -229,7 +235,9 @@ def getBMNFTData():
         receiver_to=bpy.context.scene.input_tool.receiver_to,
 
         enable_debug=bpy.context.scene.input_tool.enable_debug,
-        log_path=bpy.context.scene.input_tool.log_path,
+        log_path=bpy.path.abspath(bpy.context.scene.input_tool.log_path),
+
+        enable_dry_run=bpy.context.scene.input_tool.enable_dry_run
     )
 
     return data
@@ -252,7 +260,7 @@ def make_directories(save_path):
     return Blend_My_NFTs_Output, batch_json_save_path, nftBatch_save_path
 
 
-def runAsHeadless():
+def run_as_headless():
     """
     For use when running from the command line.
     """
@@ -313,26 +321,26 @@ def runAsHeadless():
         settings.collection_size = int(pairs[1][1])
         settings.nfts_per_batch = int(pairs[2][1])
         settings.save_path = pairs[3][1]
-        settings.enable_rarity = pairs[4][1]=='True'
-        settings.enable_logic = pairs[5][1]=='True'
+        settings.enable_rarity = pairs[4][1] == 'True'
+        settings.enable_logic = pairs[5][1] == 'True'
         settings.enableLogicJson = pairs[6][1] == 'True'
         settings.logic_file = pairs[7][1]
-        settings.image_bool = pairs[8][1]=='True'
+        settings.image_bool = pairs[8][1] == 'True'
         settings.image_enum = pairs[9][1]
-        settings.animation_bool = pairs[10][1]=='True'
+        settings.animation_bool = pairs[10][1] == 'True'
         settings.animation_enum = pairs[11][1]
-        settings.model_bool = pairs[12][1]=='True'
+        settings.model_bool = pairs[12][1] == 'True'
         settings.model_enum = pairs[13][1]
         settings.batch_to_generate = int(pairs[14][1])
-        settings.cardano_metadata_bool = pairs[15][1]=='True'
+        settings.cardano_metadata_bool = pairs[15][1] == 'True'
         settings.cardano_description = pairs[16][1]
-        settings.erc721_metadata = pairs[17][1]=='True'
+        settings.erc721_metadata = pairs[17][1] == 'True'
         settings.erc721_description = pairs[18][1]
-        settings.solana_metadata_bool = pairs[19][1]=='True'
+        settings.solana_metadata_bool = pairs[19][1] == 'True'
         settings.solanaDescription = pairs[20][1]
-        settings.enable_custom_fields = pairs[21][1]=='True'
+        settings.enable_custom_fields = pairs[21][1] == 'True'
         settings.custom_fields_file = pairs[22][1]
-        settings.enable_materials = pairs[23][1]=='True'
+        settings.enable_materials = pairs[23][1] == 'True'
         settings.materials_file = pairs[24][1]
 
     if args.save_path:
@@ -341,7 +349,7 @@ def runAsHeadless():
     if args.batch_number:
         settings.batch_to_generate = args.batch_number
 
-    input = getBMNFTData()
+    input = get_bmnft_data()
 
     if args.batch_data_path:
         input.batch_json_save_path = args.batch_data_path
@@ -356,6 +364,42 @@ def runAsHeadless():
     refactorer.reformat_nft_collection(input)
 
 
+def activate_logging():
+    """
+    Used as an intermediate activated at runtime of the following operators: CreateData, ExportNFTs, ResumeFailedBatch,
+    RefactorBatches, and ExportSettings. Must be independent of 'input' class to be safe, gets variables directly from
+    bpy.
+    """
+
+    log_path = bpy.context.scene.input_tool.log_path
+    if log_path:
+        file_handler = logging.FileHandler(os.path.join(log_path, 'BMNFTs_Log.txt'), 'a')
+    else:
+        file_handler = logging.FileHandler(os.path.join(tempfile.gettempdir(), 'BMNFTs_Log.txt'), 'a')
+
+    formatter = logging.Formatter(
+        '[%(asctime)s] [%(levelname)s] [%(filename)s > %(funcName)s() > Line:%(lineno)d]\n%(message)s\n'
+    )
+    file_handler.setFormatter(formatter)
+
+    log = logging.getLogger()
+    for handler in log.handlers[:]:
+        if isinstance(handler, logging.FileHandler):
+            log.removeHandler(handler)
+        if isinstance(handler, logging.StreamHandler):
+            log.removeHandler(handler)
+    log.addHandler(file_handler)
+
+    # Record log to console:
+    console_handler = logging.StreamHandler(sys.stdout)
+    log.addHandler(console_handler)
+
+    if bpy.context.scene.input_tool.enable_debug:
+        logging.getLogger().setLevel(logging.DEBUG)
+    else:
+        logging.getLogger().setLevel(logging.INFO)
+
+
 # ======== User input Property Group ======== #
 class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup):
     # Create NFT Data Panel:
@@ -498,8 +542,6 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup):
         subtype="FILE_PATH"
     )
 
-    # TODO: Add 'Other' panel inputs to Headless functionality.
-
     # Other Panel:
     enable_auto_save: bpy.props.BoolProperty(
         name="Auto Save Before Generation",
@@ -541,8 +583,8 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup):
 
     enable_debug: bpy.props.BoolProperty(
         name="Enable Debug Mode",
-        description="Allows you to run Blend_My_NFTs without generating any content files and includes more "
-                    "console information."
+        description="Allows you to run Blend_My_NFTs with debugging console messages saved to a BMNFTs_Log.txt "
+                    "file."
     )
     log_path: bpy.props.StringProperty(
         name="Debug Log Path",
@@ -552,6 +594,11 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup):
         subtype="FILE_PATH"
     )
 
+    enable_dry_run: bpy.props.BoolProperty(
+        name="Enable Dry Run",
+        description="Allows you to run Blend_My_NFTs without generating any content files."
+    )
+
     # API Panel properties:
     api_key: bpy.props.StringProperty(
         name="API Key",
@@ -560,7 +607,7 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup):
 
 
 # ======== Main Operators ======== #
-class Createdata(bpy.types.Operator):
+class CreateData(bpy.types.Operator):
     bl_idname = 'create.data'
     bl_label = 'Create Data'
     bl_description = 'Creates NFT Data. Run after any changes were made to scene. All previous data will be ' \
@@ -572,8 +619,10 @@ class Createdata(bpy.types.Operator):
         name="Reverse Order")
 
     def execute(self, context):
+        activate_logging()
+
         # Handling Custom Fields UIList input:
-        input = getBMNFTData()
+        input = get_bmnft_data()
 
         if input.enable_logic:
             if input.enable_logic_json and not input.logic_file:
@@ -600,8 +649,9 @@ class ExportNFTs(bpy.types.Operator):
         name="Reverse Order")
 
     def execute(self, context):
-        input = getBMNFTData()
-        # Handling Custom Fields UIList input:
+        activate_logging()
+
+        input = get_bmnft_data()
 
         intermediate.render_and_save_nfts(input)
 
@@ -617,17 +667,19 @@ class ResumeFailedBatch(bpy.types.Operator):
     bl_options = {"REGISTER", "UNDO"}
 
     def execute(self, context):
+        activate_logging()
+
         _save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path)
         _Blend_My_NFTs_Output, _batch_json_save_path, _nftBatch_save_path = make_directories(_save_path)
 
         _batchToGenerate = bpy.context.scene.input_tool.batch_to_generate
 
         file_name = os.path.join(_batch_json_save_path, "Batch{}.json".format(_batchToGenerate))
-        batchData = json.load(open(file_name))
+        batch_data = json.load(open(file_name))
 
         _fail_state, _failed_batch, _failed_dna, _failed_dna_index = helpers.check_failed_batches(_batch_json_save_path)
 
-        render_settings = batchData["Generation Save"][-1]["Render_Settings"]
+        render_settings = batch_data["Generation Save"][-1]["Render_Settings"]
 
         input = BMNFTData(
             nft_name=render_settings["nft_name"],
@@ -682,6 +734,8 @@ class ResumeFailedBatch(bpy.types.Operator):
             enable_debug=render_settings["enable_debug"],
             log_path=render_settings["log_path"],
 
+            enable_dry_run=render_settings["enable_dry_run"],
+
             fail_state=_fail_state,
             failed_batch=_failed_batch,
             failed_dna=_failed_dna,
@@ -709,8 +763,9 @@ class RefactorBatches(bpy.types.Operator):
         name="Reverse Order")
 
     def execute(self, context):
-        # Passing info to main functions for refactoring:
-        refactorer.reformat_nft_collection(getBMNFTData())
+        activate_logging()
+
+        refactorer.reformat_nft_collection(get_bmnft_data())
         return {"FINISHED"}
 
     def invoke(self, context, event):
@@ -725,6 +780,8 @@ class ExportSettings(bpy.types.Operator):
     bl_options = {"REGISTER", "UNDO"}
 
     def execute(self, context):
+        activate_logging()
+
         save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path)
         filename = "config.cfg"
 
@@ -1082,8 +1139,10 @@ class BMNFTS_PT_Other(bpy.types.Panel):
         row = layout.row()
         row.prop(input_tool_scene, "enable_debug")
         if bpy.context.scene.input_tool.enable_debug:
+            row = layout.row()
             row.prop(input_tool_scene, "log_path")
-
+            row = layout.row()
+            row.prop(input_tool_scene, "enable_dry_run")
 
         row = layout.row()
         row = layout.row()
@@ -1111,20 +1170,20 @@ class BMNFTS_PT_Other(bpy.types.Panel):
 # ======== Blender add-on register/unregister handling ======== #
 classes = (
     # Property Group Classes:
-    BMNFTS_PGT_Input_Properties,
+    BMNFTS_PGT_Input_Properties,
 
-    # Operator Classes:
-    Createdata,
-    ExportNFTs,
-    ResumeFailedBatch,
-    RefactorBatches,
-    ExportSettings,
+    # Operator Classes:
+    CreateData,
+    ExportNFTs,
+    ResumeFailedBatch,
+    RefactorBatches,
+    ExportSettings,
 
-    # Panel Classes:
-    BMNFTS_PT_CreateData,
-    BMNFTS_PT_GenerateNFTs,
-    BMNFTS_PT_Refactor,
-    BMNFTS_PT_Other,
+    # Panel Classes:
+    BMNFTS_PT_CreateData,
+    BMNFTS_PT_GenerateNFTs,
+    BMNFTS_PT_Refactor,
+    BMNFTS_PT_Other,
 ) + custom_metadata_ui_list.classes_Custom_Metadata_UIList + logic_ui_list.classes_Logic_UIList
 
 
@@ -1157,4 +1216,4 @@ def unregister():
 
 if __name__ == '__main__':
     register()
-    runAsHeadless()
+    run_as_headless()
diff --git a/main/dna_generator.py b/main/dna_generator.py
index 1b5314b..cbe9779 100644
--- a/main/dna_generator.py
+++ b/main/dna_generator.py
@@ -12,11 +12,7 @@ from functools import partial
 from . import logic, material_generator, helpers
 from .helpers import TextColors
 
-logging.basicConfig(
-    level=logging.INFO,
-    format='[%(levelname)s][%(asctime)s]\n%(message)s\n',
-    datefmt='%Y-%m-%d %H:%M:%S'
-)
+log = logging.getLogger(__name__)
 
 
 def generate_nft_dna(
@@ -110,25 +106,33 @@ def generate_nft_dna(
 
         single_dna = ""
         if not enable_rarity:
            single_dna = create_dna_random(hierarchy)
-            logging.debug(f"============\nOriginal DNA: {single_dna}")
-            print("============")
-            print(f"Original DNA: {single_dna}")
+            log.debug(
+                f"\n================"
+                f"\nOriginal DNA: {single_dna}"
+            )
 
         if enable_rarity:
             single_dna = create_dna_rarity(hierarchy)
-            logging.debug(f"Rarity DNA: {single_dna}")
-            print(f"Rarity DNA: {single_dna}")
+            log.debug(
+                f"\n================"
+                f"\nRarity DNA: {single_dna}"
+            )
 
         if enable_logic:
             single_dna = logic.logicafy_dna_single(hierarchy, single_dna, logic_file, enable_rarity)
-            logging.debug(f"Logic DNA: {single_dna}")
-            print(f"Logic DNA: {single_dna}")
+            log.debug(
+                f"\n================"
+                f"\nLogic DNA: {single_dna}"
+            )
 
         if enable_materials:
             single_dna = material_generator.apply_materials(hierarchy, single_dna, materials_file, enable_rarity)
-            logging.debug(f"Materials DNA: {single_dna}\n============\n")
-            print(f"Materials DNA: {single_dna}")
-            print("============\n")
+            log.debug(
+                f"\n================"
+                f"\nMaterials DNA: {single_dna}"
+                f"\n================\n"
+
+            )
 
         return single_dna
@@ -179,9 +183,9 @@ def make_batches(
         batch_json_save_path
 ):
     """
-    Sorts through all the batches and outputs a given number of batches depending on collection_size and nfts_per_batch.
-    These files are then saved as Batch#.json files to batch_json_save_path
-    """
+    Sorts through all the batches and outputs a given number of batches depending on collection_size and nfts_per_batch. 
+    These files are then saved as Batch#.json files to batch_json_save_path
+    """
 
     # Clears the Batch Data folder of Batches:
     batch_list = os.listdir(batch_json_save_path)
@@ -197,7 +201,6 @@ def make_batches(
     nft_record_save_path = os.path.join(blend_my_nf_ts_output, "NFTRecord.json")
     data_dictionary = json.load(open(nft_record_save_path))
 
-    num_nfts_generated = data_dictionary["num_nfts_generated"]
     hierarchy = data_dictionary["hierarchy"]
     dna_list = data_dictionary["dna_list"]
 
@@ -206,8 +209,10 @@ def make_batches(
 
     if remainder_dna > 0:
         num_batches += 1
 
-    print(f"To generate batches of {nfts_per_batch} DNA sequences per batch, with a total of {num_nfts_generated}"
-          f" possible NFT DNA sequences, the number of batches generated will be {num_batches}")
+    log.info(
+        f"\nGenerating {num_batches} batch files. If the last batch isn't filled all the way the program will "
+        f"operate normally."
+    )
 
     batches_dna_list = []
@@ -253,38 +258,37 @@ def send_to_record(
     repeat DNA.
     """
 
-    if enable_debug:
-        logging.basicConfig(
-            filename=os.path.join(log_path, "BMNFTS_Log.txt"),
-            level=logging.DEBUG
-        )
-
     # Checking Scene is compatible with BMNFTs:
     helpers.check_scene()
 
     # Messages:
-    print(
-        f"\n{TextColors.OK}======== Creating NFT Data ========{TextColors.RESET}"
-        f"\nGenerating {collection_size} NFT DNA"
+    log.info(
+        f"\n{TextColors.OK}======== Creating NFT Data ({collection_size} DNA) ========{TextColors.RESET}"
     )
 
     if not enable_rarity and not enable_logic:
-        print(
-            f"{TextColors.OK}NFT DNA will be determined randomly, no special properties or parameters are "
-            f"applied.\n{TextColors.RESET}")
+        log.info(
+            f"\n - NFT DNA will be determined randomly, no special properties or parameters are "
+            f"applied."
+        )
 
     if enable_rarity:
-        print(
-            f"{TextColors.OK}Rarity is ON. Weights listed in .blend scene will be taken into account."
-            f"{TextColors.RESET}"
+        log.info(
+            f"\n - Rarity is ON. Weights listed in .blend scene will be taken into account."
+            f""
         )
 
     if enable_logic:
-        print(
-            f"{TextColors.OK}Logic is ON. {len(list(logic_file.keys()))} rules detected and applied."
-            f"{TextColors.RESET}"
+        log.info(
+            f"\n - Logic is ON. {len(list(logic_file.keys()))} rules detected, implementation will "
+            f"be attempted."
         )
 
+    if enable_materials:
+        log.info(
+            f"\n - Materials are ON. {len(list(json.load(open(materials_file)).keys()))} materials "
+            f"instances detected, implementation will be attempted."
+        )
     time_start = time.time()
 
     def create_nft_data():
@@ -339,7 +343,7 @@ def send_to_record(
         )
 
     # Loading Animation:
-    loading = helpers.Loader(f'Creating NFT DNA...', '').start()
+    loading = helpers.Loader(f'\nCreating NFT DNA...', '').start()
     create_nft_data()
     make_batches(collection_size, nfts_per_batch, save_path, batch_json_save_path)
     loading.stop()
@@ -347,5 +351,5 @@ def send_to_record(
     time_end = time.time()
 
     print(
-        f"{TextColors.OK}Created and saved NFT DNA in {time_end - time_start}s.\n{TextColors.RESET}"
+        f"\n{TextColors.OK}Created and saved NFT DNA in {time_end - time_start}s.\n{TextColors.RESET}"
     )
diff --git a/main/exporter.py b/main/exporter.py
index e51e3b3..1bfc2dc 100644
--- a/main/exporter.py
+++ b/main/exporter.py
@@ -98,6 +98,8 @@ def save_generation_state(input):
             "enable_debug": input.enable_debug,
             "log_path": input.log_path,
 
+            "enable_dry_run": input.enable_dry_run,
+
             "custom_fields": input.custom_fields,
         },
     })
diff --git a/main/helpers.py b/main/helpers.py
index c86128d..d35f220 100644
--- a/main/helpers.py
+++ b/main/helpers.py
@@ -36,6 +36,8 @@ def remove_file_by_extension(dirlist):
     return return_dirs
 
 
+# TODO: fix colours in console logs and find a way to include coloured text in .txt file.
+
 class TextColors:
     """
     The colour of console messages.