Refactoring print statements and exceptions

- Reformatted exceptions to work with new logging system
- Reformatted all print statements
- Moved `activate_logging()` to helpers.py
- Debug mode now disables all renders and 3D model exports and performs a 'Dry Run' that checks collections/objects/DNA still exist and resolve correctly at render time (see the sketch below the commit info).
pull/141/head
Torrin Leonard 2022-08-27 21:48:44 -04:00
parent 7474847ec6
commit 0931d99214
8 changed files with 322 additions and 239 deletions
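Two patterns recur throughout the diffs below: (1) exceptions log the traceback plus a formatted message through a module logger and then re-raise, instead of raising a formatted string, and (2) render/export calls are gated behind `input.enable_debug` so debug mode performs a dry run. A minimal sketch of both follows, assuming the add-on's `input` object and module-level `log` as they appear in the diff; the function names `render_image` and `read_record` are hypothetical and for illustration only.

import logging
import traceback

import bpy  # available when running inside Blender

log = logging.getLogger(__name__)


def render_image(input, image_path, file_format):
    # Dry Run: debug mode still configures the render settings but skips the
    # actual render call, so collections/objects/DNA can be validated quickly.
    bpy.context.scene.render.filepath = image_path
    bpy.context.scene.render.image_settings.file_format = file_format
    if not input.enable_debug:
        bpy.ops.render.render(write_still=True)


def read_record(nft_record_save_path):
    # Exceptions are logged with their traceback and then re-raised, rather
    # than raising a formatted message string directly.
    try:
        with open(nft_record_save_path) as infile:
            return infile.read()
    except FileNotFoundError:
        log.error(
            f"\n{traceback.format_exc()}"
            f"\nBlend_My_NFTs Error: NFTRecord.json not found at {nft_record_save_path}."
        )
        raise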

View file

@ -26,8 +26,6 @@ from bpy.props import (IntProperty, BoolProperty, CollectionProperty)
import os
import sys
import json
import logging
import tempfile
import importlib
import traceback
from typing import Any
@ -364,42 +362,6 @@ def run_as_headless():
refactorer.reformat_nft_collection(input)
def activate_logging():
"""
Used as an intermediate activated at runtime of the following operators: CreateData, ExportNFTs, ResumeFailedBatch,
RefactorBatches, and ExportSettings. Must be independent of 'input' class to be safe, gets variables directly from
bpy.
"""
log_path = bpy.context.scene.input_tool.log_path
if log_path:
file_handler = logging.FileHandler(os.path.join(log_path, 'BMNFTs_Log.txt'), 'a')
else:
file_handler = logging.FileHandler(os.path.join(tempfile.gettempdir(), 'BMNFTs_Log.txt'), 'a')
formatter = logging.Formatter(
'[%(asctime)s] [%(levelname)s] [%(filename)s > %(funcName)s() > Line:%(lineno)d]\n%(message)s\n'
)
file_handler.setFormatter(formatter)
log = logging.getLogger()
for handler in log.handlers[:]:
if isinstance(handler, logging.FileHandler):
log.removeHandler(handler)
if isinstance(handler, logging.StreamHandler):
log.removeHandler(handler)
log.addHandler(file_handler)
# Record log to console:
console_handler = logging.StreamHandler(sys.stdout)
log.addHandler(console_handler)
if bpy.context.scene.input_tool.enable_debug:
logging.getLogger().setLevel(logging.DEBUG)
else:
logging.getLogger().setLevel(logging.INFO)
# ======== User input Property Group ======== #
class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup):
# Create NFT Data Panel:
@ -583,8 +545,8 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup):
enable_debug: bpy.props.BoolProperty(
name="Enable Debug Mode",
description="Allows you to run Blend_My_NFTs with debugging console messages saved to a BMNFTs_Log.txt "
"file."
description="Allows you to run Blend_My_NFTs without generating any content files and enables debugging "
"console messages saved to a BMNFTs_Log.txt file."
)
log_path: bpy.props.StringProperty(
name="Debug Log Path",
@ -619,7 +581,7 @@ class CreateData(bpy.types.Operator):
name="Reverse Order")
def execute(self, context):
activate_logging()
helpers.activate_logging()
# Handling Custom Fields UIList input:
input = get_bmnft_data()
@ -649,7 +611,7 @@ class ExportNFTs(bpy.types.Operator):
name="Reverse Order")
def execute(self, context):
activate_logging()
helpers.activate_logging()
input = get_bmnft_data()
@ -667,7 +629,7 @@ class ResumeFailedBatch(bpy.types.Operator):
bl_options = {"REGISTER", "UNDO"}
def execute(self, context):
activate_logging()
helpers.activate_logging()
_save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path)
_Blend_My_NFTs_Output, _batch_json_save_path, _nftBatch_save_path = make_directories(_save_path)
@ -763,7 +725,7 @@ class RefactorBatches(bpy.types.Operator):
name="Reverse Order")
def execute(self, context):
activate_logging()
helpers.activate_logging()
refactorer.reformat_nft_collection(get_bmnft_data())
return {"FINISHED"}
@ -780,7 +742,7 @@ class ExportSettings(bpy.types.Operator):
bl_options = {"REGISTER", "UNDO"}
def execute(self, context):
activate_logging()
helpers.activate_logging()
save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path)
filename = "config.cfg"
@ -1141,8 +1103,6 @@ class BMNFTS_PT_Other(bpy.types.Panel):
if bpy.context.scene.input_tool.enable_debug:
row = layout.row()
row.prop(input_tool_scene, "log_path")
row = layout.row()
row.prop(input_tool_scene, "enable_dry_run")
row = layout.row()
row = layout.row()

View file

@ -313,12 +313,15 @@ def send_to_record(
os.path.join(save_path, "Blend_My_NFTs Output/NFT_Data"))
except FileNotFoundError:
raise FileNotFoundError(
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows "
f"the naming conventions and scene structure. For more information, see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"Data not saved to NFTRecord.json, file not found. Check that your save path, logic file path, or "
f"materials file path is correct. For more information, see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
raise
finally:
loading.stop()
@ -327,20 +330,21 @@ def send_to_record(
with open(nft_record_save_path, 'w') as outfile:
outfile.write(ledger + '\n')
print(
f"\n{TextColors.OK}Blend_My_NFTs Success:\n"
f"{len(data_dictionary['dna_list'])} NFT DNA saved to {nft_record_save_path}. NFT DNA Successfully "
f"created.\n{TextColors.RESET}")
log.info(
f"\n{TextColors.OK}{len(data_dictionary['dna_list'])} NFT data successfully saved to:"
f"\n{nft_record_save_path}{TextColors.RESET}"
)
except Exception:
traceback.print_exc()
raise (
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows "
f"the naming conventions and scene structure. For more information, "
f"see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows "
f"the naming conventions and scene structure. For more information, "
f"see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
raise
# Loading Animation:
loading = helpers.Loader(f'\nCreating NFT DNA...', '').start()
@ -350,6 +354,6 @@ def send_to_record(
time_end = time.time()
print(
f"\n{TextColors.OK}Created and saved NFT DNA in {time_end - time_start}s.\n{TextColors.RESET}"
log.info(
f"\n{TextColors.OK}TIME [Created and Saved NFT data]: {time_end - time_start}s.\n{TextColors.RESET}"
)

View file

@ -8,12 +8,16 @@ import ssl
import time
import json
import smtplib
import logging
import datetime
import platform
import traceback
from .helpers import TextColors, Loader
from .metadata_templates import create_cardano_metadata, createSolanaMetaData, create_erc721_meta_data
log = logging.getLogger(__name__)
# Save info
def save_batch(batch, file_name):
@ -145,7 +149,9 @@ def render_and_save_nfts(input):
# If failed Batch is detected and user is resuming its generation:
if input.fail_state:
print(f"{TextColors.ERROR}\nResuming Batch #{input.failed_batch}\n{TextColors.RESET}")
log.info(
f"{TextColors.OK}\nResuming Batch #{input.failed_batch}{TextColors.RESET}"
)
nfts_in_batch, hierarchy, batch_dna_list = get_batch_data(input.failed_batch, input.batch_json_save_path)
for a in range(input.failed_dna):
del batch_dna_list[0]
@ -153,7 +159,9 @@ def render_and_save_nfts(input):
# If user is generating the normal way:
else:
print(f"\nGenerating Batch #{input.batch_to_generate}\n")
log.info(
f"{TextColors.OK}\n======== Generating Batch #{input.batch_to_generate} ========{TextColors.RESET}"
)
nfts_in_batch, hierarchy, batch_dna_list = get_batch_data(input.batch_to_generate, input.batch_json_save_path)
save_generation_state(input)
x = 1
@ -259,13 +267,15 @@ def render_and_save_nfts(input):
bpy.data.collections[j].hide_render = True
bpy.data.collections[j].hide_viewport = True
except KeyError:
raise TypeError(
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"The Collection '{j}' appears to be missing or has been renamed. If you made any changes "
f"to your .blend file scene, ensure you re-create your NFT Data so Blend_My_NFTs can read "
f"your scene. For more information see:{TextColors.RESET}"
f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
raise TypeError()
dna_dictionary = match_dna_to_variant(single_dna)
name = input.nft_name + "_" + str(order_num)
@ -275,14 +285,18 @@ def render_and_save_nfts(input):
# ob = bpy.data.objects['Text'] # Object name
# ob.data.body = str(f"DNA: {full_single_dna}") # Set text of Text Object ob
print(f"\n{TextColors.OK}======== Generating NFT {x}/{nfts_in_batch}: {name} ========{TextColors.RESET}")
print(f"\nVariants selected:")
print(f"{dna_dictionary}")
log.info(
f"\n{TextColors.OK}======== Generating NFT {x}/{nfts_in_batch}: {name} ========{TextColors.RESET}"
f"\nVariants selected:"
f"\n{dna_dictionary}"
)
if input.enable_materials:
print(f"\nMaterials selected:")
print(f"{material_dna_dictionary}")
log.info(
f"\nMaterials selected:"
f"\n{material_dna_dictionary}"
)
print(f"\nDNA Code:{full_single_dna}")
log.info(f"\nDNA Code:{full_single_dna}")
for c in dna_dictionary:
collection = dna_dictionary[c]
@ -320,7 +334,7 @@ def render_and_save_nfts(input):
# Generation/Rendering:
if input.enable_images:
print(f"\n{TextColors.OK}-------- Image --------{TextColors.RESET}")
log.info(f"\n{TextColors.OK}-------- Image --------{TextColors.RESET}")
image_render_time_start = time.time()
@ -332,7 +346,9 @@ def render_and_save_nfts(input):
bpy.context.scene.render.filepath = image_path
bpy.context.scene.render.image_settings.file_format = input.image_file_format
bpy.ops.render.render(write_still=True)
if not input.enable_debug:
bpy.ops.render.render(write_still=True)
# Loading Animation:
loading = Loader(f'Rendering Image {x}/{nfts_in_batch}...', '').start()
@ -341,13 +357,13 @@ def render_and_save_nfts(input):
image_render_time_end = time.time()
print(
f"{TextColors.OK}Rendered image in {image_render_time_end - image_render_time_start}s."
log.info(
f"{TextColors.OK}TIME [Rendered Image]: {image_render_time_end - image_render_time_start}s."
f"\n{TextColors.RESET}"
)
if input.enable_animations:
print(f"\n{TextColors.OK}-------- Animation --------{TextColors.RESET}")
log.info(f"\n{TextColors.OK}-------- Animation --------{TextColors.RESET}")
animation_render_time_start = time.time()
@ -357,34 +373,35 @@ def render_and_save_nfts(input):
if not os.path.exists(animation_folder):
os.makedirs(animation_folder)
if input.animation_file_format == 'MP4':
bpy.context.scene.render.filepath = animation_path
bpy.context.scene.render.image_settings.file_format = "FFMPEG"
if not input.enable_debug:
if input.animation_file_format == 'MP4':
bpy.context.scene.render.filepath = animation_path
bpy.context.scene.render.image_settings.file_format = "FFMPEG"
bpy.context.scene.render.ffmpeg.format = 'MPEG4'
bpy.context.scene.render.ffmpeg.codec = 'H264'
bpy.ops.render.render(animation=True)
bpy.context.scene.render.ffmpeg.format = 'MPEG4'
bpy.context.scene.render.ffmpeg.codec = 'H264'
bpy.ops.render.render(animation=True)
elif input.animation_file_format == 'PNG':
if not os.path.exists(animation_path):
os.makedirs(animation_path)
elif input.animation_file_format == 'PNG':
if not os.path.exists(animation_path):
os.makedirs(animation_path)
bpy.context.scene.render.filepath = os.path.join(animation_path, name)
bpy.context.scene.render.image_settings.file_format = input.animation_file_format
bpy.ops.render.render(animation=True)
bpy.context.scene.render.filepath = os.path.join(animation_path, name)
bpy.context.scene.render.image_settings.file_format = input.animation_file_format
bpy.ops.render.render(animation=True)
elif input.animation_file_format == 'TIFF':
if not os.path.exists(animation_path):
os.makedirs(animation_path)
elif input.animation_file_format == 'TIFF':
if not os.path.exists(animation_path):
os.makedirs(animation_path)
bpy.context.scene.render.filepath = os.path.join(animation_path, name)
bpy.context.scene.render.image_settings.file_format = input.animation_file_format
bpy.ops.render.render(animation=True)
bpy.context.scene.render.filepath = os.path.join(animation_path, name)
bpy.context.scene.render.image_settings.file_format = input.animation_file_format
bpy.ops.render.render(animation=True)
else:
bpy.context.scene.render.filepath = animation_path
bpy.context.scene.render.image_settings.file_format = input.animation_file_format
bpy.ops.render.render(animation=True)
else:
bpy.context.scene.render.filepath = animation_path
bpy.context.scene.render.image_settings.file_format = input.animation_file_format
bpy.ops.render.render(animation=True)
# Loading Animation:
loading = Loader(f'Rendering Animation {x}/{nfts_in_batch}...', '').start()
@ -393,13 +410,13 @@ def render_and_save_nfts(input):
animation_render_time_end = time.time()
print(
f"{TextColors.OK}Rendered animation in {animation_render_time_end - animation_render_time_start}s."
f"\n{TextColors.RESET}"
log.info(
f"\n{TextColors.OK}TIME [Rendered Animation]: "
f"{animation_render_time_end - animation_render_time_start}s.{TextColors.RESET}"
)
if input.enable_models:
print(f"\n{TextColors.OK}-------- 3D Model --------{TextColors.RESET}")
log.info(f"\n{TextColors.OK}-------- 3D Model --------{TextColors.RESET}")
model_generation_time_start = time.time()
@ -424,65 +441,66 @@ def render_and_save_nfts(input):
# if obj.name in remove_objects:
# obj.select_set(False)
if input.model_file_format == 'GLB':
check_failed_exists(f"{model_path}.glb")
bpy.ops.export_scene.gltf(
filepath=f"{model_path}.glb",
check_existing=True,
export_format='GLB',
export_keep_originals=True,
use_selection=True
)
if input.model_file_format == 'GLTF_SEPARATE':
check_failed_exists(f"{model_path}.gltf")
check_failed_exists(f"{model_path}.bin")
bpy.ops.export_scene.gltf(
filepath=f"{model_path}",
check_existing=True,
export_format='GLTF_SEPARATE',
export_keep_originals=True,
use_selection=True
)
if input.model_file_format == 'GLTF_EMBEDDED':
check_failed_exists(f"{model_path}.gltf")
bpy.ops.export_scene.gltf(
filepath=f"{model_path}.gltf",
check_existing=True,
export_format='GLTF_EMBEDDED',
export_keep_originals=True,
use_selection=True
)
elif input.model_file_format == 'FBX':
check_failed_exists(f"{model_path}.fbx")
bpy.ops.export_scene.fbx(
filepath=f"{model_path}.fbx",
check_existing=True,
use_selection=True
)
elif input.model_file_format == 'OBJ':
check_failed_exists(f"{model_path}.obj")
bpy.ops.export_scene.obj(
filepath=f"{model_path}.obj",
check_existing=True,
use_selection=True,
)
elif input.model_file_format == 'X3D':
check_failed_exists(f"{model_path}.x3d")
bpy.ops.export_scene.x3d(
filepath=f"{model_path}.x3d",
check_existing=True,
use_selection=True
)
elif input.model_file_format == 'STL':
check_failed_exists(f"{model_path}.stl")
bpy.ops.export_mesh.stl(
filepath=f"{model_path}.stl",
check_existing=True,
use_selection=True
)
elif input.model_file_format == 'VOX':
check_failed_exists(f"{model_path}.vox")
bpy.ops.export_vox.some_data(filepath=f"{model_path}.vox")
if not input.enable_debug:
if input.model_file_format == 'GLB':
check_failed_exists(f"{model_path}.glb")
bpy.ops.export_scene.gltf(
filepath=f"{model_path}.glb",
check_existing=True,
export_format='GLB',
export_keep_originals=True,
use_selection=True
)
if input.model_file_format == 'GLTF_SEPARATE':
check_failed_exists(f"{model_path}.gltf")
check_failed_exists(f"{model_path}.bin")
bpy.ops.export_scene.gltf(
filepath=f"{model_path}",
check_existing=True,
export_format='GLTF_SEPARATE',
export_keep_originals=True,
use_selection=True
)
if input.model_file_format == 'GLTF_EMBEDDED':
check_failed_exists(f"{model_path}.gltf")
bpy.ops.export_scene.gltf(
filepath=f"{model_path}.gltf",
check_existing=True,
export_format='GLTF_EMBEDDED',
export_keep_originals=True,
use_selection=True
)
elif input.model_file_format == 'FBX':
check_failed_exists(f"{model_path}.fbx")
bpy.ops.export_scene.fbx(
filepath=f"{model_path}.fbx",
check_existing=True,
use_selection=True
)
elif input.model_file_format == 'OBJ':
check_failed_exists(f"{model_path}.obj")
bpy.ops.export_scene.obj(
filepath=f"{model_path}.obj",
check_existing=True,
use_selection=True,
)
elif input.model_file_format == 'X3D':
check_failed_exists(f"{model_path}.x3d")
bpy.ops.export_scene.x3d(
filepath=f"{model_path}.x3d",
check_existing=True,
use_selection=True
)
elif input.model_file_format == 'STL':
check_failed_exists(f"{model_path}.stl")
bpy.ops.export_mesh.stl(
filepath=f"{model_path}.stl",
check_existing=True,
use_selection=True
)
elif input.model_file_format == 'VOX':
check_failed_exists(f"{model_path}.vox")
bpy.ops.export_vox.some_data(filepath=f"{model_path}.vox")
# Loading Animation:
loading = Loader(f'Generating 3D model {x}/{nfts_in_batch}...', '').start()
@ -491,9 +509,9 @@ def render_and_save_nfts(input):
model_generation_time_end = time.time()
print(
f"{TextColors.OK}Generated 3D model in {model_generation_time_end - model_generation_time_start}s."
f"\n{TextColors.RESET}"
log.info(
f"\n{TextColors.OK}TIME [Generated 3D Model]: "
f"{model_generation_time_end - model_generation_time_start}s.{TextColors.RESET}"
)
# Generating Metadata:
@ -561,7 +579,7 @@ def render_and_save_nfts(input):
with open(os.path.join(bmnft_data_folder, "Data_" + name + ".json"), 'w') as outfile:
outfile.write(json_meta_data + '\n')
print(f"Completed {name} render in {time.time() - time_start_2}s")
log.info(f"{TextColors.OK}\nTIME [NFT {name} Generated]: {time.time() - time_start_2}s")
save_completed(full_single_dna, a, x, input.batch_json_save_path, input.batch_to_generate)
@ -574,8 +592,11 @@ def render_and_save_nfts(input):
batch_complete_time = time.time() - time_start_1
print(f"\nAll NFTs successfully generated and sent to {input.nft_batch_save_path}"
f"\nCompleted all renders in Batch{input.batch_to_generate}.json in {batch_complete_time}s\n")
log.info(
f"\nAll NFTs in Batch {input.batch_to_generate} successfully generated and saved at:"
f"\n{input.nft_batch_save_path}"
f"\nTIME [Batch {input.batch_to_generate} Generated]: {batch_complete_time}s\n"
)
batch_info = {"Batch Render Time": batch_complete_time, "Number of NFTs generated in Batch": x - 1,
"Average time per generation": batch_complete_time / x - 1}
@ -630,11 +651,9 @@ def render_and_save_nfts(input):
# Automatic Shutdown:
# If user selects automatic shutdown but did not specify time after Batch completion
def shutdown(time):
plateform = platform.system()
if plateform == "Windows":
if platform.system() == "Windows":
os.system(f"shutdown /s /t {time}")
if plateform == "Darwin":
if platform.system() == "Darwin":
os.system(f"shutdown /s /t {time}")
if input.enable_auto_shutdown and not input.specify_time_bool:

View file

@ -1,14 +1,20 @@
import bpy
import os
import sys
import json
import copy
import logging
import tempfile
import platform
import traceback
from time import sleep
from itertools import cycle
from threading import Thread
from shutil import get_terminal_size
from collections import Counter, defaultdict
log = logging.getLogger(__name__)
# ======== CONSTANTS ======== #
@ -139,24 +145,28 @@ def get_hierarchy():
for i in att_vars:
# Check if name follows naming conventions:
if int(i.count("_")) > 2 and int(i.split("_")[1]) > 0:
raise Exception(
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"There is a naming issue with the following Attribute/Variant: '{i}'\n"
f"Review the naming convention of Attribute and Variant collections here:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
raise Exception()
try:
number = i.split("_")[1]
name = i.split("_")[0]
rarity = i.split("_")[2]
except IndexError:
raise Exception(
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"There is a naming issue with the following Attribute/Variant: '{i}'\n"
f"Review the naming convention of Attribute and Variant collections here:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
raise Exception()
all_att_data_list[i] = {"name": name, "number": number, "rarity": rarity}
return all_att_data_list
@ -174,7 +184,7 @@ def get_hierarchy():
for a in hierarchy:
for b in hierarchy[a]:
for x in variant_meta_data:
if str(x)==str(b):
if str(x) == str(b):
(hierarchy[a])[b] = variant_meta_data[x]
return hierarchy
@ -195,10 +205,10 @@ def get_combinations():
for i in hierarchy:
# Ignore Collections with nothing in them
if len(hierarchy[i])!=0:
if len(hierarchy[i]) != 0:
hierarchy_by_num.append(len(hierarchy[i]))
else:
print(f"The following collection has been identified as empty: {i}")
log.warning(f"\nThe following collection has been identified as empty: {i}")
combinations = 1
for i in hierarchy_by_num:
@ -232,13 +242,14 @@ def check_scene(): # Not complete
scriptIgnoreCollection = bpy.data.collections["Script_Ignore"]
script_ignore_exists = True
except KeyError:
raise TypeError(
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"Add a Script_Ignore collection to your Blender scene and ensure the name is exactly 'Script_Ignore'. "
f"For more information, "
f"see:"
f"For more information, see:"
f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{TextColors.RESET}"
)
raise
hierarchy = get_hierarchy()
collections = bpy.context.scene.collection
@ -288,23 +299,27 @@ def check_rarity(hierarchy, dna_list_formatted, save_path):
complete_data[i] = x
print(
f"\n{TextColors.OK}\n"
f"Rarity Checker is active. These are the percentages for each variant per attribute you set in your .blend"
f" file: \n{TextColors.RESET}"
)
# Saving Rarity data to console and log:
x = f"\nPercentages for each Variant per Attribute:"
for i in complete_data:
print(i + ":")
for j in complete_data[i]:
print(" " + j + ": " + complete_data[i][j][0] + " Occurrences: " + complete_data[i][j][1])
x += f"\n\n{i}:"
if complete_data[i]:
for j in complete_data[i]:
x += f"\n - {j}: {complete_data[i][j][0]} occurs {complete_data[i][j][1]} times."
else:
x += f"\n - Variants not selected."
log.info(x)
json_meta_data = json.dumps(complete_data, indent=1, ensure_ascii=True)
with open(os.path.join(save_path, "RarityData.json"), 'w') as outfile:
outfile.write(json_meta_data + '\n')
path = os.path.join(save_path, "RarityData.json")
print(TextColors.OK + f"Rarity Data has been saved to {path}." + TextColors.RESET)
log.info(
f"\nRarity data has been saved to:\n{path}"
)
def check_duplicates(dna_list_formatted):
@ -318,11 +333,18 @@ def check_duplicates(dna_list_formatted):
for x in dna_list:
if x in seen:
print(x)
duplicates += 1
seen.add(x)
print(f"\nNFTRecord.json contains {duplicates} duplicate NFT DNA.")
if duplicates > 0:
log.warning(
f"\n{TextColors.WARNING}Blend_My_NFTs Warning:\n"
f"{duplicates} duplicate NFT DNA was detected. This should not be possible. For more information, see:"
f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure"
f"\n{TextColors.RESET}"
)
log.info(f"\n\nDuplicate NFT DNA found: {duplicates}")
def check_failed_batches(batch_json_save_path):
@ -355,7 +377,8 @@ def raise_error_num_batches(max_nfts, nfts_per_batch):
num_batches = max_nfts / nfts_per_batch
return num_batches
except ZeroDivisionError:
raise ZeroDivisionError(
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"The number of NFTs per Batch must be greater than ZERO."
f"Please review your Blender scene and ensure it follows "
@ -364,12 +387,14 @@ def raise_error_num_batches(max_nfts, nfts_per_batch):
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure"
f"\n{TextColors.RESET}"
)
raise ZeroDivisionError()
def raise_error_zero_combinations():
"""Checks if combinations is greater than 0, if so, raises error."""
if get_combinations() == 0:
raise ValueError(
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"The number of all possible combinations is ZERO. Please review your Blender scene and ensure it "
f"follows the naming conventions and scene structure. For more information, see:\n{TextColors.RESET}"
@ -377,10 +402,13 @@ def raise_error_zero_combinations():
f"\n{TextColors.RESET}"
)
raise ValueError()
def raise_error_num_batches_greater_then(num_batches):
if num_batches < 1:
raise ValueError(
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"The number of Batches is less than 1. Please review your Blender scene and ensure it follows "
f"the naming conventions and scene structure. For more information, "
@ -388,6 +416,7 @@ def raise_error_num_batches_greater_then(num_batches):
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure"
f"\n{TextColors.RESET}"
)
raise ValueError()
# Raise Warnings:
@ -397,12 +426,15 @@ def raise_warning_max_nfts(nfts_per_batch, collection_size):
"""
if nfts_per_batch > collection_size:
raise ValueError(
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.WARNING}Blend_My_NFTs Warning:\n"
f"The number of NFTs Per Batch you set is smaller than the NFT Collection Size you set."
f"\n{TextColors.RESET}"
)
raise ValueError()
def raise_warning_collection_size(dna_list, collection_size):
"""
@ -410,18 +442,21 @@ def raise_warning_collection_size(dna_list, collection_size):
"""
if len(dna_list) < collection_size:
print(f"\n{TextColors.WARNING} \nWARNING: \n"
f"Blend_My_NFTs cannot generate {collection_size} NFTs."
f" Only {len(dna_list)} NFT DNA were generated."
log.warning(
f"\n{traceback.format_exc()}"
f"\n{TextColors.WARNING} \nWARNING: \n"
f"Blend_My_NFTs cannot generate {collection_size} NFTs."
f" Only {len(dna_list)} NFT DNA were generated."
f"\nThis might be for a number of reasons:"
f"\n a) Rarity is preventing combinations from being generated (See "
f"https://github.com/torrinworx/Blend_My_NFTs#notes-on-rarity-and-weighted-variants).\n "
f"\n b) Logic is preventing combinations from being generated (See "
f"https://github.com/torrinworx/Blend_My_NFTs#logic).\n "
f"\n c) The number of possible combinations of your NFT collection is too low. Add more Variants or "
f"Attributes to increase the recommended collection size.\n "
f"\n{TextColors.RESET}")
f"\nThis might be for a number of reasons:"
f"\n a) Rarity is preventing combinations from being generated (See "
f"https://github.com/torrinworx/Blend_My_NFTs#notes-on-rarity-and-weighted-variants).\n "
f"\n b) Logic is preventing combinations from being generated (See "
f"https://github.com/torrinworx/Blend_My_NFTs#logic).\n "
f"\n c) The number of possible combinations of your NFT collection is too low. Add more Variants or "
f"Attributes to increase the recommended collection size.\n "
f"\n{TextColors.RESET}"
)
# ======== LOADING ANIMATION ======== #
@ -480,3 +515,39 @@ class Loader:
def __exit__(self, exc_type, exc_value, tb):
# handle exceptions with those variables ^
self.stop()
def activate_logging():
"""
Initializes logging at runtime for the following operators: CreateData, ExportNFTs, ResumeFailedBatch,
RefactorBatches, and ExportSettings. Must be independent of the 'input' class to be safe; variables are read
directly from bpy.
"""
log_path = bpy.context.scene.input_tool.log_path
if log_path:
file_handler = logging.FileHandler(os.path.join(log_path, 'BMNFTs_Log.txt'), 'a')
else:
file_handler = logging.FileHandler(os.path.join(tempfile.gettempdir(), 'BMNFTs_Log.txt'), 'a')
formatter = logging.Formatter(
'[%(asctime)s] [%(levelname)s] [%(filename)s > %(funcName)s() > Line:%(lineno)d]\n%(message)s\n'
)
file_handler.setFormatter(formatter)
log = logging.getLogger()
for handler in log.handlers[:]:
if isinstance(handler, logging.FileHandler):
log.removeHandler(handler)
if isinstance(handler, logging.StreamHandler):
log.removeHandler(handler)
log.addHandler(file_handler)
# Record log to console:
console_handler = logging.StreamHandler(sys.stdout)
log.addHandler(console_handler)
if bpy.context.scene.input_tool.enable_debug:
logging.getLogger().setLevel(logging.DEBUG)
else:
logging.getLogger().setLevel(logging.INFO)
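For context, a sketch (under assumed import paths) of how the relocated helper is intended to be used: each operator's execute() calls helpers.activate_logging() first, and every module logs through its own logging.getLogger(__name__) logger, which propagates to the root handlers installed above. The operator class and its bl_idname here are hypothetical.

import logging

import bpy

from main import helpers  # import path assumed for illustration

log = logging.getLogger(__name__)


class EXAMPLE_OT_run(bpy.types.Operator):
    bl_idname = "example.run"  # hypothetical operator id
    bl_label = "Example"

    def execute(self, context):
        helpers.activate_logging()  # installs the file + console handlers
        log.info("Example message")
        # With the formatter above, each record is written roughly as:
        # [2022-08-27 21:48:44,000] [INFO] [example.py > execute() > Line:17]
        # Example message
        return {"FINISHED"}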

View file

@ -1,9 +1,14 @@
import logging
import bpy
import json
from main import dna_generator, exporter
# TODO: migrate this code to the exporter.py to simplify render process into one file.
log = logging.getLogger(__name__)
# TODO: migrate this code to the dna_generator.py(send_to_record) and exporter.py(render_and_save) to simplify render
# process into one file.
def send_to_record(input, reverse_order=False):
@ -12,7 +17,10 @@ def send_to_record(input, reverse_order=False):
input.logic_file = json.load(open(input.logic_file))
if input.enable_logic_json and not input.logic_file:
print({'ERROR'}, f"No Logic.json file path set. Please set the file path to your Logic.json file.")
log.error(
f"No Logic.json file path set. Please set the file path to your Logic.json file."
)
raise
if not input.enable_logic_json:
scn = bpy.context.scene
@ -66,19 +74,21 @@ def render_and_save_nfts(input, reverse_order=False):
for i in range(scn.custom_metadata_fields_index, -1, -1):
item = scn.custom_metadata_fields[i]
if item.field_name in list(input.custom_fields.keys()):
raise ValueError(
f"A duplicate of '{item.field_name}' was found. Please ensure all Custom Metadata field Names "
f"are unique."
log.error(
f"A duplicate of '{item.field_name}' was found. Ensure all Custom Metadata field "
f"Names are unique."
)
raise ValueError()
else:
input.custom_fields[item.field_name] = item.field_value
else:
for item in scn.custom_metadata_fields:
if item.field_name in list(input.custom_fields.keys()):
raise ValueError(
f"A duplicate of '{item.field_name}' was found. Please ensure all Custom Metadata field Names "
f"are unique."
log.error(
f"A duplicate of '{item.field_name}' was found. Ensure all Custom Metadata field "
f"Names are unique."
)
raise ValueError()
else:
input.custom_fields[item.field_name] = item.field_value

View file

@ -3,10 +3,14 @@
# dna_generator.py
import random
import logging
import traceback
import collections
from .helpers import TextColors
log = logging.getLogger(__name__)
def reconstruct_dna(deconstructed_dna):
reconstructed_dna = ""
@ -116,22 +120,26 @@ def apply_rules_to_dna(hierarchy, deconstructed_dna, if_dict, result_dict, resul
elif not if_zero_bool:
variant_num = random.choices(number_list_of_i, weights=rarity_list_of_i, k=1)
except IndexError:
raise IndexError(
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Attribute collection '{a}'. For more information on "
f"Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Attribute collection '{a}'. For more information on "
f"Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
raise IndexError()
else:
try:
variant_num = random.choices(number_list_of_i, k=1)
except IndexError:
raise IndexError(
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Attribute collection '{a}'. For more information on "
f"Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Attribute collection '{a}'. For more information on "
f"Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
raise IndexError()
deconstructed_dna[int(attribute_index)] = str(variant_num[0])
return deconstructed_dna
@ -281,7 +289,7 @@ def logicafy_dna_single(hierarchy, single_dna, logic_file, enable_rarity):
result_dict_type,
)
if violates_rule:
# print(f"======={deconstructed_dna} VIOLATES RULE======")
log.debug(f"======={deconstructed_dna} VIOLATES RULE======")
deconstructed_dna = apply_rules_to_dna(
hierarchy,

View file

@ -5,8 +5,12 @@
import json
import random
import logging
import traceback
from .helpers import TextColors
log = logging.getLogger(__name__)
def select_material(material_list, variant, enable_rarity):
"""Selects a material from a passed material list. """
@ -39,22 +43,26 @@ def select_material(material_list, variant, enable_rarity):
elif not if_zero_bool:
selected_material = random.choices(material_list_of_i, weights=rarity_list_of_i, k=1)
except IndexError:
raise IndexError(
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Material List of the Variant collection '{variant}'. For more "
f"information on Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Material List of the Variant collection '{variant}'. For more "
f"information on Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
raise IndexError()
else:
try:
selected_material = random.choices(material_list_of_i, k=1)
except IndexError:
raise IndexError(
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Material List of the Variant collection '{variant}'. For more "
f"information on Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Material List of the Variant collection '{variant}'. For more "
f"information on Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
raise IndexError()
return selected_material[0], material_list

View file

@ -4,9 +4,12 @@
import os
import json
import shutil
import logging
from .helpers import remove_file_by_extension
log = logging.getLogger(__name__)
def reformat_nft_collection(refactor_panel_input):
complete_coll_path = os.path.join(refactor_panel_input.save_path, "Blend_My_NFTs Output", "Complete_Collection")
@ -41,6 +44,6 @@ def reformat_nft_collection(refactor_panel_input):
with open(os.path.join(complete_coll_path, "collection_info.json"), 'w') as outfile:
outfile.write(collection_info + '\n')
print(f"All NFT files stored and sorted to the Complete_Collection folder in {refactor_panel_input.save_path}")
log.info(f"All NFT files stored and sorted to the Complete_Collection folder in {refactor_panel_input.save_path}")
shutil.rmtree(refactor_panel_input.nft_batch_save_path)