Merge pull request #141 from torrinworx/Testing-Logger

Merging Testing logger into Debug-Mode
pull/142/head
Torrin Leonard 2022-08-27 21:50:45 -04:00 zatwierdzone przez GitHub
commit 04b47f81e3
Nie znaleziono w bazie danych klucza dla tego podpisu
ID klucza GPG: 4AEE18F83AFDEB23
8 zmienionych plików z 469 dodań i 300 usunięć

Wyświetl plik

@ -17,11 +17,11 @@ LAST_UPDATED = "01:02PM, Aug 24th, 2022"
# ======== Import handling ======== #
# Blender modules:
import bpy
from bpy.app.handlers import persistent
from bpy.props import (IntProperty,
BoolProperty,
CollectionProperty)
from bpy.props import (IntProperty, BoolProperty, CollectionProperty)
# Python modules:
import os
import sys
@ -35,7 +35,7 @@ from datetime import datetime, timezone
# "a little hacky bs" - matt159 ;)
sys.path.append(os.path.dirname(os.path.realpath(__file__)))
# Local file imports:
# Local modules:
from main import \
helpers, \
dna_generator, \
@ -51,6 +51,7 @@ from UILists import \
custom_metadata_ui_list, \
logic_ui_list
# Refresh Locals for development:
if "bpy" in locals():
modules = {
"helpers": helpers,
@ -79,7 +80,7 @@ dt = datetime.now(timezone.utc).astimezone() # Date Time in UTC local
@persistent
def Refresh_UI(dummy1, dummy2):
def refresh_ui(dummy1, dummy2):
"""
Refreshes the UI upon user interacting with Blender (using depsgraph_update_post handler). Might be a better handler
to use.
@ -106,7 +107,7 @@ def Refresh_UI(dummy1, dummy2):
redraw_panel(refresh_panel_classes)
bpy.app.handlers.depsgraph_update_post.append(Refresh_UI)
bpy.app.handlers.depsgraph_update_post.append(refresh_ui)
# ======== Defining BMNFTs Data ======== #
@ -160,7 +161,11 @@ class BMNFTData:
sender_from: str
email_password: str
receiver_to: str
enable_debug: bool
log_path: str
enable_dry_run: str
custom_fields: dict = None
fail_state: Any = False
@ -172,13 +177,14 @@ class BMNFTData:
self.custom_fields = {}
def getBMNFTData():
def get_bmnft_data():
_save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path)
_Blend_My_NFTs_Output, _batch_json_save_path, _nftBatch_save_path = make_directories(_save_path)
# IMPORTANT: if a new directory variable is ever added, use 'bpy.path.abspath' instead of 'os.path.abspath'.
data = BMNFTData(
nft_name=bpy.context.scene.input_tool.nft_name,
save_path=_save_path,
save_path=bpy.path.abspath(_save_path), # Converting from Blender's relative path system to absolute.
nfts_per_batch=bpy.context.scene.input_tool.nfts_per_batch,
batch_to_generate=bpy.context.scene.input_tool.batch_to_generate,
collection_size=bpy.context.scene.input_tool.collection_size,
@ -191,7 +197,7 @@ def getBMNFTData():
enable_logic=bpy.context.scene.input_tool.enable_logic,
enable_logic_json=bpy.context.scene.input_tool.enable_logic_json,
logic_file=bpy.context.scene.input_tool.logic_file,
logic_file=bpy.path.abspath(bpy.context.scene.input_tool.logic_file),
enable_images=bpy.context.scene.input_tool.image_bool,
image_file_format=bpy.context.scene.input_tool.image_enum,
@ -225,7 +231,11 @@ def getBMNFTData():
sender_from=bpy.context.scene.input_tool.sender_from,
email_password=bpy.context.scene.input_tool.email_password,
receiver_to=bpy.context.scene.input_tool.receiver_to,
enable_debug=bpy.context.scene.input_tool.enable_debug
enable_debug=bpy.context.scene.input_tool.enable_debug,
log_path=bpy.path.abspath(bpy.context.scene.input_tool.log_path),
enable_dry_run=bpy.context.scene.input_tool.enable_dry_run
)
return data
@ -248,7 +258,7 @@ def make_directories(save_path):
return Blend_My_NFTs_Output, batch_json_save_path, nftBatch_save_path
def runAsHeadless():
def run_as_headless():
"""
For use when running from the command line.
"""
@ -309,26 +319,26 @@ def runAsHeadless():
settings.collection_size = int(pairs[1][1])
settings.nfts_per_batch = int(pairs[2][1])
settings.save_path = pairs[3][1]
settings.enable_rarity = pairs[4][1]=='True'
settings.enable_logic = pairs[5][1]=='True'
settings.enable_rarity = pairs[4][1] == 'True'
settings.enable_logic = pairs[5][1] == 'True'
settings.enableLogicJson = pairs[6][1] == 'True'
settings.logic_file = pairs[7][1]
settings.image_bool = pairs[8][1]=='True'
settings.image_bool = pairs[8][1] == 'True'
settings.image_enum = pairs[9][1]
settings.animation_bool = pairs[10][1]=='True'
settings.animation_bool = pairs[10][1] == 'True'
settings.animation_enum = pairs[11][1]
settings.model_bool = pairs[12][1]=='True'
settings.model_bool = pairs[12][1] == 'True'
settings.model_enum = pairs[13][1]
settings.batch_to_generate = int(pairs[14][1])
settings.cardano_metadata_bool = pairs[15][1]=='True'
settings.cardano_metadata_bool = pairs[15][1] == 'True'
settings.cardano_description = pairs[16][1]
settings.erc721_metadata = pairs[17][1]=='True'
settings.erc721_metadata = pairs[17][1] == 'True'
settings.erc721_description = pairs[18][1]
settings.solana_metadata_bool = pairs[19][1]=='True'
settings.solana_metadata_bool = pairs[19][1] == 'True'
settings.solanaDescription = pairs[20][1]
settings.enable_custom_fields = pairs[21][1]=='True'
settings.enable_custom_fields = pairs[21][1] == 'True'
settings.custom_fields_file = pairs[22][1]
settings.enable_materials = pairs[23][1]=='True'
settings.enable_materials = pairs[23][1] == 'True'
settings.materials_file = pairs[24][1]
if args.save_path:
@ -337,7 +347,7 @@ def runAsHeadless():
if args.batch_number:
settings.batch_to_generate = args.batch_number
input = getBMNFTData()
input = get_bmnft_data()
if args.batch_data_path:
input.batch_json_save_path = args.batch_data_path
@ -494,8 +504,6 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup):
subtype="FILE_PATH"
)
# TODO: Add 'Other' panel inputs to Headless functionality.
# Other Panel:
enable_auto_save: bpy.props.BoolProperty(
name="Auto Save Before Generation",
@ -537,8 +545,20 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup):
enable_debug: bpy.props.BoolProperty(
name="Enable Debug Mode",
description="Allows you to run Blend_My_NFTs without generating any content files and includes more "
"console information."
description="Allows you to run Blend_My_NFTs without generating any content files and enables debugging "
"console messages saved to a BMNFTs_Log.txt file."
)
log_path: bpy.props.StringProperty(
name="Debug Log Path",
description="Path where BMNFT_Log.txt is located.",
default="",
maxlen=1024,
subtype="FILE_PATH"
)
enable_dry_run: bpy.props.BoolProperty(
name="Enable Dry Run",
description="Allows you to run Blend_My_NFTs without generating any content files."
)
# API Panel properties:
@ -549,7 +569,7 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup):
# ======== Main Operators ======== #
class Createdata(bpy.types.Operator):
class CreateData(bpy.types.Operator):
bl_idname = 'create.data'
bl_label = 'Create Data'
bl_description = 'Creates NFT Data. Run after any changes were made to scene. All previous data will be ' \
@ -561,8 +581,10 @@ class Createdata(bpy.types.Operator):
name="Reverse Order")
def execute(self, context):
helpers.activate_logging()
# Handling Custom Fields UIList input:
input = getBMNFTData()
input = get_bmnft_data()
if input.enable_logic:
if input.enable_logic_json and not input.logic_file:
@ -589,8 +611,9 @@ class ExportNFTs(bpy.types.Operator):
name="Reverse Order")
def execute(self, context):
input = getBMNFTData()
# Handling Custom Fields UIList input:
helpers.activate_logging()
input = get_bmnft_data()
intermediate.render_and_save_nfts(input)
@ -606,17 +629,19 @@ class ResumeFailedBatch(bpy.types.Operator):
bl_options = {"REGISTER", "UNDO"}
def execute(self, context):
helpers.activate_logging()
_save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path)
_Blend_My_NFTs_Output, _batch_json_save_path, _nftBatch_save_path = make_directories(_save_path)
_batchToGenerate = bpy.context.scene.input_tool.batch_to_generate
file_name = os.path.join(_batch_json_save_path, "Batch{}.json".format(_batchToGenerate))
batchData = json.load(open(file_name))
batch_data = json.load(open(file_name))
_fail_state, _failed_batch, _failed_dna, _failed_dna_index = helpers.check_failed_batches(_batch_json_save_path)
render_settings = batchData["Generation Save"][-1]["Render_Settings"]
render_settings = batch_data["Generation Save"][-1]["Render_Settings"]
input = BMNFTData(
nft_name=render_settings["nft_name"],
@ -669,6 +694,9 @@ class ResumeFailedBatch(bpy.types.Operator):
receiver_to=render_settings["receiver_to"],
enable_debug=render_settings["enable_debug"],
log_path=render_settings["log_path"],
enable_dry_run=render_settings["enable_dry_run"],
fail_state=_fail_state,
failed_batch=_failed_batch,
@ -697,8 +725,9 @@ class RefactorBatches(bpy.types.Operator):
name="Reverse Order")
def execute(self, context):
# Passing info to main functions for refactoring:
refactorer.reformat_nft_collection(getBMNFTData())
helpers.activate_logging()
refactorer.reformat_nft_collection(get_bmnft_data())
return {"FINISHED"}
def invoke(self, context, event):
@ -713,6 +742,8 @@ class ExportSettings(bpy.types.Operator):
bl_options = {"REGISTER", "UNDO"}
def execute(self, context):
helpers.activate_logging()
save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path)
filename = "config.cfg"
@ -1069,7 +1100,9 @@ class BMNFTS_PT_Other(bpy.types.Panel):
row = layout.row()
row.prop(input_tool_scene, "enable_debug")
if bpy.context.scene.input_tool.enable_debug:
row = layout.row()
row.prop(input_tool_scene, "log_path")
row = layout.row()
row = layout.row()
@ -1080,8 +1113,12 @@ class BMNFTS_PT_Other(bpy.types.Panel):
icon='URL').url = "https://github.com/torrinworx/Blend_My_NFTs"
row = layout.row()
row.operator("wm.url_open", text="YouTube Tutorials",
icon='URL').url = "https://www.youtube.com/watch?v=ygKJYz4BjRs&list=PLuVvzaanutXcYtWmPVKu2bx83EYNxLRsX"
row.operator(
"wm.url_open",
text="YouTube Tutorials",
icon='URL'
).url = "https://www.youtube.com/watch?v=ygKJYz4BjRs&list=PLuVvzaanutXcYtWmPVKu2bx83EYNxLRsX"
row = layout.row()
row.operator("wm.url_open", text="Join Our Discord Community!",
icon='URL').url = "https://discord.gg/UpZt5Un57t"
@ -1093,20 +1130,20 @@ class BMNFTS_PT_Other(bpy.types.Panel):
# ======== Blender add-on register/unregister handling ======== #
classes = (
# Property Group Classes:
BMNFTS_PGT_Input_Properties,
BMNFTS_PGT_Input_Properties,
# Operator Classes:
Createdata,
ExportNFTs,
ResumeFailedBatch,
RefactorBatches,
ExportSettings,
# Operator Classes:
CreateData,
ExportNFTs,
ResumeFailedBatch,
RefactorBatches,
ExportSettings,
# Panel Classes:
BMNFTS_PT_CreateData,
BMNFTS_PT_GenerateNFTs,
BMNFTS_PT_Refactor,
BMNFTS_PT_Other,
# Panel Classes:
BMNFTS_PT_CreateData,
BMNFTS_PT_GenerateNFTs,
BMNFTS_PT_Refactor,
BMNFTS_PT_Other,
) + custom_metadata_ui_list.classes_Custom_Metadata_UIList + logic_ui_list.classes_Logic_UIList
@ -1139,4 +1176,4 @@ def unregister():
if __name__ == '__main__':
register()
runAsHeadless()
run_as_headless()

Wyświetl plik

@ -5,11 +5,15 @@ import os
import time
import json
import random
import logging
import traceback
from functools import partial
from . import logic, material_generator, helpers
from .helpers import TextColors
log = logging.getLogger(__name__)
def generate_nft_dna(
collection_size,
@ -18,7 +22,6 @@ def generate_nft_dna(
logic_file,
enable_materials,
materials_file,
enable_debug
):
"""
Returns batchDataDictionary containing the number of NFT combinations, hierarchy, and the dna_list.
@ -95,7 +98,7 @@ def generate_nft_dna(
single_dna = ''.join(single_dna.split('-', 1))
return single_dna
def singleCompleteDNA():
def single_complete_dna():
"""
This function applies Rarity and Logic to a single DNA created by createDNASingle() if Rarity or Logic specified
"""
@ -103,21 +106,33 @@ def generate_nft_dna(
single_dna = ""
if not enable_rarity:
single_dna = create_dna_random(hierarchy)
# print("============")
# print(f"Original DNA: {single_dna}")
log.debug(
f"\n================"
f"\nOriginal DNA: {single_dna}"
)
if enable_rarity:
single_dna = create_dna_rarity(hierarchy)
# print(f"Rarity DNA: {single_dna}")
log.debug(
f"\n================"
f"\nRarity DNA: {single_dna}"
)
if enable_logic:
single_dna = logic.logicafy_dna_single(hierarchy, single_dna, logic_file, enable_rarity)
# print(f"Logic DNA: {single_dna}")
log.debug(
f"\n================"
f"\nLogic DNA: {single_dna}"
)
if enable_materials:
single_dna = material_generator.apply_materials(hierarchy, single_dna, materials_file, enable_rarity)
# print(f"Materials DNA: {single_dna}")
log.debug(
f"\n================"
f"\nMaterials DNA: {single_dna}"
f"\n================\n"
# print("============\n")
)
return single_dna
@ -129,7 +144,7 @@ def generate_nft_dna(
dna_set_return = set()
for i in range(collection_size):
dna_push_to_list = partial(singleCompleteDNA)
dna_push_to_list = partial(single_complete_dna)
dna_set_return |= {''.join([dna_push_to_list()]) for _ in range(collection_size - len(dna_set_return))}
@ -151,8 +166,6 @@ def generate_nft_dna(
dna_list = create_dna_list()
# Messages:
helpers.raise_warning_collection_size(dna_list, collection_size)
# Data stored in batchDataDictionary:
@ -170,9 +183,9 @@ def make_batches(
batch_json_save_path
):
"""
Sorts through all the batches and outputs a given number of batches depending on collection_size and nfts_per_batch.
These files are then saved as Batch#.json files to batch_json_save_path
"""
Sorts through all the batches and outputs a given number of batches depending on collection_size and nfts_per_batch.
These files are then saved as Batch#.json files to batch_json_save_path
"""
# Clears the Batch Data folder of Batches:
batch_list = os.listdir(batch_json_save_path)
@ -188,7 +201,6 @@ def make_batches(
nft_record_save_path = os.path.join(blend_my_nf_ts_output, "NFTRecord.json")
data_dictionary = json.load(open(nft_record_save_path))
num_nfts_generated = data_dictionary["num_nfts_generated"]
hierarchy = data_dictionary["hierarchy"]
dna_list = data_dictionary["dna_list"]
@ -197,8 +209,10 @@ def make_batches(
if remainder_dna > 0:
num_batches += 1
print(f"To generate batches of {nfts_per_batch} DNA sequences per batch, with a total of {num_nfts_generated}"
f" possible NFT DNA sequences, the number of batches generated will be {num_batches}")
log.info(
f"\nGenerating {num_batches} batch files. If the last batch isn't filled all the way the program will "
f"operate normally."
)
batches_dna_list = []
@ -234,7 +248,8 @@ def send_to_record(
materials_file,
blend_my_nfts_output,
batch_json_save_path,
enable_debug
enable_debug,
log_path
):
"""
Creates NFTRecord.json file and sends "batch_data_dictionary" to it. NFTRecord.json is a permanent record of all DNA
@ -247,28 +262,33 @@ def send_to_record(
helpers.check_scene()
# Messages:
print(
f"\n{TextColors.OK}======== Creating NFT Data ========{TextColors.RESET}"
f"\nGenerating {collection_size} NFT DNA"
log.info(
f"\n{TextColors.OK}======== Creating NFT Data ({collection_size} DNA) ========{TextColors.RESET}"
)
if not enable_rarity and not enable_logic:
print(
f"{TextColors.OK}NFT DNA will be determined randomly, no special properties or parameters are "
f"applied.\n{TextColors.RESET}")
log.info(
f"\n - NFT DNA will be determined randomly, no special properties or parameters are "
f"applied."
)
if enable_rarity:
print(
f"{TextColors.OK}Rarity is ON. Weights listed in .blend scene will be taken into account."
f"{TextColors.RESET}"
log.info(
f"\n - Rarity is ON. Weights listed in .blend scene will be taken into account."
f""
)
if enable_logic:
print(
f"{TextColors.OK}Logic is ON. {len(list(logic_file.keys()))} rules detected and applied."
f"{TextColors.RESET}"
log.info(
f"\n - Logic is ON. {len(list(logic_file.keys()))} rules detected, implementation will "
f"be attempted."
)
if enable_materials:
log.info(
f"\n - Materials are ON. {len(list(json.load(open(materials_file)).keys()))} materials "
f"instances detected, implementation will be attempted."
)
time_start = time.time()
def create_nft_data():
@ -280,7 +300,6 @@ def send_to_record(
logic_file,
enable_materials,
materials_file,
enable_debug,
)
nft_record_save_path = os.path.join(blend_my_nfts_output, "NFTRecord.json")
@ -294,12 +313,15 @@ def send_to_record(
os.path.join(save_path, "Blend_My_NFTs Output/NFT_Data"))
except FileNotFoundError:
raise FileNotFoundError(
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows "
f"the naming conventions and scene structure. For more information, see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"Data not saved to NFTRecord.json, file not found. Check that your save path, logic file path, or "
f"materials file path is correct. For more information, see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
raise
finally:
loading.stop()
@ -308,29 +330,30 @@ def send_to_record(
with open(nft_record_save_path, 'w') as outfile:
outfile.write(ledger + '\n')
print(
f"\n{TextColors.OK}Blend_My_NFTs Success:\n"
f"{len(data_dictionary['dna_list'])} NFT DNA saved to {nft_record_save_path}. NFT DNA Successfully "
f"created.\n{TextColors.RESET}")
except Exception:
traceback.print_exc()
raise (
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows "
f"the naming conventions and scene structure. For more information, "
f"see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
log.info(
f"\n{TextColors.OK}{len(data_dictionary['dna_list'])} NFT data successfully saved to:"
f"\n{nft_record_save_path}{TextColors.RESET}"
)
except Exception:
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows "
f"the naming conventions and scene structure. For more information, "
f"see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
raise
# Loading Animation:
loading = helpers.Loader(f'Creating NFT DNA...', '').start()
loading = helpers.Loader(f'\nCreating NFT DNA...', '').start()
create_nft_data()
make_batches(collection_size, nfts_per_batch, save_path, batch_json_save_path)
loading.stop()
time_end = time.time()
print(
f"{TextColors.OK}Created and saved NFT DNA in {time_end - time_start}s.\n{TextColors.RESET}"
log.info(
f"\n{TextColors.OK}TIME [Created and Saved NFT data]: {time_end - time_start}s.\n{TextColors.RESET}"
)

Wyświetl plik

@ -8,12 +8,16 @@ import ssl
import time
import json
import smtplib
import logging
import datetime
import platform
import traceback
from .helpers import TextColors, Loader
from .metadata_templates import create_cardano_metadata, createSolanaMetaData, create_erc721_meta_data
log = logging.getLogger(__name__)
# Save info
def save_batch(batch, file_name):
@ -96,6 +100,9 @@ def save_generation_state(input):
"receiver_to": input.receiver_to,
"enable_debug": input.enable_debug,
"log_path": input.log_path,
"enable_dry_run": input.enable_dry_run,
"custom_fields": input.custom_fields,
},
@ -142,7 +149,9 @@ def render_and_save_nfts(input):
# If failed Batch is detected and user is resuming its generation:
if input.fail_state:
print(f"{TextColors.ERROR}\nResuming Batch #{input.failed_batch}\n{TextColors.RESET}")
log.info(
f"{TextColors.OK}\nResuming Batch #{input.failed_batch}{TextColors.RESET}"
)
nfts_in_batch, hierarchy, batch_dna_list = get_batch_data(input.failed_batch, input.batch_json_save_path)
for a in range(input.failed_dna):
del batch_dna_list[0]
@ -150,7 +159,9 @@ def render_and_save_nfts(input):
# If user is generating the normal way:
else:
print(f"\nGenerating Batch #{input.batch_to_generate}\n")
log.info(
f"{TextColors.OK}\n======== Generating Batch #{input.batch_to_generate} ========{TextColors.RESET}"
)
nfts_in_batch, hierarchy, batch_dna_list = get_batch_data(input.batch_to_generate, input.batch_json_save_path)
save_generation_state(input)
x = 1
@ -256,13 +267,15 @@ def render_and_save_nfts(input):
bpy.data.collections[j].hide_render = True
bpy.data.collections[j].hide_viewport = True
except KeyError:
raise TypeError(
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"The Collection '{j}' appears to be missing or has been renamed. If you made any changes to "
f"your .blend file scene, ensure you re-create your NFT Data so Blend_My_NFTs can read your "
f"scene. For more information see:{TextColors.RESET}"
f"The Collection '{j}' appears to be missing or has been renamed. If you made any changes "
f"to your .blend file scene, ensure you re-create your NFT Data so Blend_My_NFTs can read "
f"your scene. For more information see:{TextColors.RESET}"
f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
raise TypeError()
dna_dictionary = match_dna_to_variant(single_dna)
name = input.nft_name + "_" + str(order_num)
@ -272,14 +285,18 @@ def render_and_save_nfts(input):
# ob = bpy.data.objects['Text'] # Object name
# ob.data.body = str(f"DNA: {full_single_dna}") # Set text of Text Object ob
print(f"\n{TextColors.OK}======== Generating NFT {x}/{nfts_in_batch}: {name} ========{TextColors.RESET}")
print(f"\nVariants selected:")
print(f"{dna_dictionary}")
log.info(
f"\n{TextColors.OK}======== Generating NFT {x}/{nfts_in_batch}: {name} ========{TextColors.RESET}"
f"\nVariants selected:"
f"\n{dna_dictionary}"
)
if input.enable_materials:
print(f"\nMaterials selected:")
print(f"{material_dna_dictionary}")
log.info(
f"\nMaterials selected:"
f"\n{material_dna_dictionary}"
)
print(f"\nDNA Code:{full_single_dna}")
log.info(f"\nDNA Code:{full_single_dna}")
for c in dna_dictionary:
collection = dna_dictionary[c]
@ -317,7 +334,7 @@ def render_and_save_nfts(input):
# Generation/Rendering:
if input.enable_images:
print(f"\n{TextColors.OK}-------- Image --------{TextColors.RESET}")
log.info(f"\n{TextColors.OK}-------- Image --------{TextColors.RESET}")
image_render_time_start = time.time()
@ -329,7 +346,9 @@ def render_and_save_nfts(input):
bpy.context.scene.render.filepath = image_path
bpy.context.scene.render.image_settings.file_format = input.image_file_format
bpy.ops.render.render(write_still=True)
if not input.enable_debug:
bpy.ops.render.render(write_still=True)
# Loading Animation:
loading = Loader(f'Rendering Image {x}/{nfts_in_batch}...', '').start()
@ -338,13 +357,13 @@ def render_and_save_nfts(input):
image_render_time_end = time.time()
print(
f"{TextColors.OK}Rendered image in {image_render_time_end - image_render_time_start}s."
log.info(
f"{TextColors.OK}TIME [Rendered Image]: {image_render_time_end - image_render_time_start}s."
f"\n{TextColors.RESET}"
)
if input.enable_animations:
print(f"\n{TextColors.OK}-------- Animation --------{TextColors.RESET}")
log.info(f"\n{TextColors.OK}-------- Animation --------{TextColors.RESET}")
animation_render_time_start = time.time()
@ -354,34 +373,35 @@ def render_and_save_nfts(input):
if not os.path.exists(animation_folder):
os.makedirs(animation_folder)
if input.animation_file_format =="MP4":
bpy.context.scene.render.filepath = animation_path
bpy.context.scene.render.image_settings.file_format = "FFMPEG"
if not input.enable_debug:
if input.animation_file_format == 'MP4':
bpy.context.scene.render.filepath = animation_path
bpy.context.scene.render.image_settings.file_format = "FFMPEG"
bpy.context.scene.render.ffmpeg.format = 'MPEG4'
bpy.context.scene.render.ffmpeg.codec = 'H264'
bpy.ops.render.render(animation=True)
bpy.context.scene.render.ffmpeg.format = 'MPEG4'
bpy.context.scene.render.ffmpeg.codec = 'H264'
bpy.ops.render.render(animation=True)
elif input.animation_file_format =='PNG':
if not os.path.exists(animation_path):
os.makedirs(animation_path)
elif input.animation_file_format == 'PNG':
if not os.path.exists(animation_path):
os.makedirs(animation_path)
bpy.context.scene.render.filepath = os.path.join(animation_path, name)
bpy.context.scene.render.image_settings.file_format = input.animation_file_format
bpy.ops.render.render(animation=True)
bpy.context.scene.render.filepath = os.path.join(animation_path, name)
bpy.context.scene.render.image_settings.file_format = input.animation_file_format
bpy.ops.render.render(animation=True)
elif input.animation_file_format =='TIFF':
if not os.path.exists(animation_path):
os.makedirs(animation_path)
elif input.animation_file_format == 'TIFF':
if not os.path.exists(animation_path):
os.makedirs(animation_path)
bpy.context.scene.render.filepath = os.path.join(animation_path, name)
bpy.context.scene.render.image_settings.file_format = input.animation_file_format
bpy.ops.render.render(animation=True)
bpy.context.scene.render.filepath = os.path.join(animation_path, name)
bpy.context.scene.render.image_settings.file_format = input.animation_file_format
bpy.ops.render.render(animation=True)
else:
bpy.context.scene.render.filepath = animation_path
bpy.context.scene.render.image_settings.file_format = input.animation_file_format
bpy.ops.render.render(animation=True)
else:
bpy.context.scene.render.filepath = animation_path
bpy.context.scene.render.image_settings.file_format = input.animation_file_format
bpy.ops.render.render(animation=True)
# Loading Animation:
loading = Loader(f'Rendering Animation {x}/{nfts_in_batch}...', '').start()
@ -390,13 +410,13 @@ def render_and_save_nfts(input):
animation_render_time_end = time.time()
print(
f"{TextColors.OK}Rendered animation in {animation_render_time_end - animation_render_time_start}s."
f"\n{TextColors.RESET}"
log.info(
f"\n{TextColors.OK}TIME [Rendered Animation]: "
f"{animation_render_time_end - animation_render_time_start}s.{TextColors.RESET}"
)
if input.enable_models:
print(f"\n{TextColors.OK}-------- 3D Model --------{TextColors.RESET}")
log.info(f"\n{TextColors.OK}-------- 3D Model --------{TextColors.RESET}")
model_generation_time_start = time.time()
@ -421,65 +441,66 @@ def render_and_save_nfts(input):
# if obj.name in remove_objects:
# obj.select_set(False)
if input.model_file_format =='GLB':
check_failed_exists(f"{model_path}.glb")
bpy.ops.export_scene.gltf(
filepath=f"{model_path}.glb",
check_existing=True,
export_format='GLB',
export_keep_originals=True,
use_selection=True
)
if input.model_file_format =='GLTF_SEPARATE':
check_failed_exists(f"{model_path}.gltf")
check_failed_exists(f"{model_path}.bin")
bpy.ops.export_scene.gltf(
filepath=f"{model_path}",
check_existing=True,
export_format='GLTF_SEPARATE',
export_keep_originals=True,
use_selection=True
)
if input.model_file_format =='GLTF_EMBEDDED':
check_failed_exists(f"{model_path}.gltf")
bpy.ops.export_scene.gltf(
filepath=f"{model_path}.gltf",
check_existing=True,
export_format='GLTF_EMBEDDED',
export_keep_originals=True,
use_selection=True
)
elif input.model_file_format =='FBX':
check_failed_exists(f"{model_path}.fbx")
bpy.ops.export_scene.fbx(
filepath=f"{model_path}.fbx",
check_existing=True,
use_selection=True
)
elif input.model_file_format =='OBJ':
check_failed_exists(f"{model_path}.obj")
bpy.ops.export_scene.obj(
filepath=f"{model_path}.obj",
check_existing=True,
use_selection=True,
)
elif input.model_file_format =='X3D':
check_failed_exists(f"{model_path}.x3d")
bpy.ops.export_scene.x3d(
filepath=f"{model_path}.x3d",
check_existing=True,
use_selection=True
)
elif input.model_file_format =='STL':
check_failed_exists(f"{model_path}.stl")
bpy.ops.export_mesh.stl(
filepath=f"{model_path}.stl",
check_existing=True,
use_selection=True
)
elif input.model_file_format =='VOX':
check_failed_exists(f"{model_path}.vox")
bpy.ops.export_vox.some_data(filepath=f"{model_path}.vox")
if not input.enable_debug:
if input.model_file_format == 'GLB':
check_failed_exists(f"{model_path}.glb")
bpy.ops.export_scene.gltf(
filepath=f"{model_path}.glb",
check_existing=True,
export_format='GLB',
export_keep_originals=True,
use_selection=True
)
if input.model_file_format == 'GLTF_SEPARATE':
check_failed_exists(f"{model_path}.gltf")
check_failed_exists(f"{model_path}.bin")
bpy.ops.export_scene.gltf(
filepath=f"{model_path}",
check_existing=True,
export_format='GLTF_SEPARATE',
export_keep_originals=True,
use_selection=True
)
if input.model_file_format == 'GLTF_EMBEDDED':
check_failed_exists(f"{model_path}.gltf")
bpy.ops.export_scene.gltf(
filepath=f"{model_path}.gltf",
check_existing=True,
export_format='GLTF_EMBEDDED',
export_keep_originals=True,
use_selection=True
)
elif input.model_file_format == 'FBX':
check_failed_exists(f"{model_path}.fbx")
bpy.ops.export_scene.fbx(
filepath=f"{model_path}.fbx",
check_existing=True,
use_selection=True
)
elif input.model_file_format == 'OBJ':
check_failed_exists(f"{model_path}.obj")
bpy.ops.export_scene.obj(
filepath=f"{model_path}.obj",
check_existing=True,
use_selection=True,
)
elif input.model_file_format == 'X3D':
check_failed_exists(f"{model_path}.x3d")
bpy.ops.export_scene.x3d(
filepath=f"{model_path}.x3d",
check_existing=True,
use_selection=True
)
elif input.model_file_format == 'STL':
check_failed_exists(f"{model_path}.stl")
bpy.ops.export_mesh.stl(
filepath=f"{model_path}.stl",
check_existing=True,
use_selection=True
)
elif input.model_file_format == 'VOX':
check_failed_exists(f"{model_path}.vox")
bpy.ops.export_vox.some_data(filepath=f"{model_path}.vox")
# Loading Animation:
loading = Loader(f'Generating 3D model {x}/{nfts_in_batch}...', '').start()
@ -488,9 +509,9 @@ def render_and_save_nfts(input):
model_generation_time_end = time.time()
print(
f"{TextColors.OK}Generated 3D model in {model_generation_time_end - model_generation_time_start}s."
f"\n{TextColors.RESET}"
log.info(
f"\n{TextColors.OK}TIME [Generated 3D Model]: "
f"{model_generation_time_end - model_generation_time_start}s.{TextColors.RESET}"
)
# Generating Metadata:
@ -558,7 +579,7 @@ def render_and_save_nfts(input):
with open(os.path.join(bmnft_data_folder, "Data_" + name + ".json"), 'w') as outfile:
outfile.write(json_meta_data + '\n')
print(f"Completed {name} render in {time.time() - time_start_2}s")
log.info(f"{TextColors.OK}\nTIME [NFT {name} Generated]: {time.time() - time_start_2}s")
save_completed(full_single_dna, a, x, input.batch_json_save_path, input.batch_to_generate)
@ -571,13 +592,21 @@ def render_and_save_nfts(input):
batch_complete_time = time.time() - time_start_1
print(f"\nAll NFTs successfully generated and sent to {input.nft_batch_save_path}"
f"\nCompleted all renders in Batch{input.batch_to_generate}.json in {batch_complete_time}s\n")
log.info(
f"\nAll NFTs in Batch {input.batch_to_generate} successfully generated and saved at:"
f"\n{input.nft_batch_save_path}"
f"\nTIME [Batch {input.batch_to_generate} Generated]: {batch_complete_time}s\n"
)
batch_info = {"Batch Render Time": batch_complete_time, "Number of NFTs generated in Batch": x - 1,
"Average time per generation": batch_complete_time / x - 1}
batch_info_folder = os.path.join(input.nft_batch_save_path, "Batch" + str(input.batch_to_generate), "batch_info.json")
batch_info_folder = os.path.join(
input.nft_batch_save_path,
"Batch" + str(input.batch_to_generate),
"batch_info.json"
)
save_batch(batch_info, batch_info_folder)
# Send Email that Batch is complete:
@ -622,11 +651,9 @@ def render_and_save_nfts(input):
# Automatic Shutdown:
# If user selects automatic shutdown but did not specify time after Batch completion
def shutdown(time):
plateform = platform.system()
if plateform == "Windows":
if platform.system() == "Windows":
os.system(f"shutdown /s /t {time}")
if plateform == "Darwin":
if platform.system() == "Darwin":
os.system(f"shutdown /s /t {time}")
if input.enable_auto_shutdown and not input.specify_time_bool:

Wyświetl plik

@ -1,30 +1,19 @@
import bpy
import os
import sys
import json
import copy
import logging
import tempfile
import platform
import traceback
from time import sleep
from itertools import cycle
from threading import Thread
from shutil import get_terminal_size
from collections import Counter, defaultdict
# ======== ENABLE DEBUG ======== #
# This section is used for debugging, coding, or general testing purposes.
def enable_debug(enable_debug_bool):
    """Configure root logging to ./log.txt at DEBUG level when debugging is enabled."""
    if not enable_debug_bool:
        return

    import logging

    logging.basicConfig(
        filename="./log.txt",
        format='[%(levelname)s][%(asctime)s]\n%(message)s\n',
        datefmt='%Y-%m-%d %H:%M:%S',
        level=logging.DEBUG,
    )
log = logging.getLogger(__name__)
# ======== CONSTANTS ======== #
@ -53,6 +42,8 @@ def remove_file_by_extension(dirlist):
return return_dirs
# TODO: fix colours in console logs and find a way to include coloured text in .txt file.
class TextColors:
"""
The colour of console messages.
@ -154,24 +145,28 @@ def get_hierarchy():
for i in att_vars:
# Check if name follows naming conventions:
if int(i.count("_")) > 2 and int(i.split("_")[1]) > 0:
raise Exception(
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"There is a naming issue with the following Attribute/Variant: '{i}'\n"
f"Review the naming convention of Attribute and Variant collections here:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
raise Exception()
try:
number = i.split("_")[1]
name = i.split("_")[0]
rarity = i.split("_")[2]
except IndexError:
raise Exception(
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"There is a naming issue with the following Attribute/Variant: '{i}'\n"
f"Review the naming convention of Attribute and Variant collections here:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
raise Exception()
all_att_data_list[i] = {"name": name, "number": number, "rarity": rarity}
return all_att_data_list
@ -189,7 +184,7 @@ def get_hierarchy():
for a in hierarchy:
for b in hierarchy[a]:
for x in variant_meta_data:
if str(x)==str(b):
if str(x) == str(b):
(hierarchy[a])[b] = variant_meta_data[x]
return hierarchy
@ -210,10 +205,10 @@ def get_combinations():
for i in hierarchy:
# Ignore Collections with nothing in them
if len(hierarchy[i])!=0:
if len(hierarchy[i]) != 0:
hierarchy_by_num.append(len(hierarchy[i]))
else:
print(f"The following collection has been identified as empty: {i}")
log.warning(f"\nThe following collection has been identified as empty: {i}")
combinations = 1
for i in hierarchy_by_num:
@ -247,13 +242,14 @@ def check_scene(): # Not complete
scriptIgnoreCollection = bpy.data.collections["Script_Ignore"]
script_ignore_exists = True
except KeyError:
raise TypeError(
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"Add a Script_Ignore collection to your Blender scene and ensure the name is exactly 'Script_Ignore'. "
f"For more information, "
f"see:"
f"For more information, see:"
f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{TextColors.RESET}"
)
raise
hierarchy = get_hierarchy()
collections = bpy.context.scene.collection
@ -303,23 +299,27 @@ def check_rarity(hierarchy, dna_list_formatted, save_path):
complete_data[i] = x
print(
f"\n{TextColors.OK}\n"
f"Rarity Checker is active. These are the percentages for each variant per attribute you set in your .blend"
f" file: \n{TextColors.RESET}"
)
# Saving Rarity data to console and log:
x = f"\nPercentages for each Variant per Attribute:"
for i in complete_data:
print(i + ":")
for j in complete_data[i]:
print(" " + j + ": " + complete_data[i][j][0] + " Occurrences: " + complete_data[i][j][1])
x += f"\n\n{i}:"
if complete_data[i]:
for j in complete_data[i]:
x += f"\n - {j}: {complete_data[i][j][0]} occurs {complete_data[i][j][1]} times."
else:
x += f"\n - Variants not selected."
log.info(x)
json_meta_data = json.dumps(complete_data, indent=1, ensure_ascii=True)
with open(os.path.join(save_path, "RarityData.json"), 'w') as outfile:
outfile.write(json_meta_data + '\n')
path = os.path.join(save_path, "RarityData.json")
print(TextColors.OK + f"Rarity Data has been saved to {path}." + TextColors.RESET)
log.info(
f"\nRarity data has been saved to:\n{path}"
)
def check_duplicates(dna_list_formatted):
@ -333,11 +333,18 @@ def check_duplicates(dna_list_formatted):
for x in dna_list:
if x in seen:
print(x)
duplicates += 1
seen.add(x)
print(f"\nNFTRecord.json contains {duplicates} duplicate NFT DNA.")
if duplicates > 0:
log.warning(
f"\n{TextColors.WARNING}Blend_My_NFTs Warning:\n"
f"{duplicates} duplicate NFT DNA was detected. This should not be possible. For more information, see:"
f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure"
f"\n{TextColors.RESET}"
)
log.info(f"\n\nDuplicate NFT DNA found: {duplicates}")
def check_failed_batches(batch_json_save_path):
@ -370,7 +377,8 @@ def raise_error_num_batches(max_nfts, nfts_per_batch):
num_batches = max_nfts / nfts_per_batch
return num_batches
except ZeroDivisionError:
raise ZeroDivisionError(
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"The number of NFTs per Batch must be greater than ZERO."
f"Please review your Blender scene and ensure it follows "
@ -379,12 +387,14 @@ def raise_error_num_batches(max_nfts, nfts_per_batch):
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure"
f"\n{TextColors.RESET}"
)
raise ZeroDivisionError()
def raise_error_zero_combinations():
"""Checks if combinations is greater than 0, if so, raises error."""
if get_combinations() == 0:
raise ValueError(
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"The number of all possible combinations is ZERO. Please review your Blender scene and ensure it "
f"follows the naming conventions and scene structure. For more information, see:\n{TextColors.RESET}"
@ -392,10 +402,13 @@ def raise_error_zero_combinations():
f"\n{TextColors.RESET}"
)
raise ValueError()
def raise_error_num_batches_greater_then(num_batches):
if num_batches < 1:
raise ValueError(
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"The number of Batches is less than 1. Please review your Blender scene and ensure it follows "
f"the naming conventions and scene structure. For more information, "
@ -403,6 +416,7 @@ def raise_error_num_batches_greater_then(num_batches):
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure"
f"\n{TextColors.RESET}"
)
raise ValueError()
# Raise Warnings:
@ -412,12 +426,15 @@ def raise_warning_max_nfts(nfts_per_batch, collection_size):
"""
if nfts_per_batch > collection_size:
raise ValueError(
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.WARNING}Blend_My_NFTs Warning:\n"
f"The number of NFTs Per Batch you set is smaller than the NFT Collection Size you set."
f"\n{TextColors.RESET}"
)
raise ValueError()
def raise_warning_collection_size(dna_list, collection_size):
"""
@ -425,18 +442,21 @@ def raise_warning_collection_size(dna_list, collection_size):
"""
if len(dna_list) < collection_size:
print(f"\n{TextColors.WARNING} \nWARNING: \n"
f"Blend_My_NFTs cannot generate {collection_size} NFTs."
f" Only {len(dna_list)} NFT DNA were generated."
log.warning(
f"\n{traceback.format_exc()}"
f"\n{TextColors.WARNING} \nWARNING: \n"
f"Blend_My_NFTs cannot generate {collection_size} NFTs."
f" Only {len(dna_list)} NFT DNA were generated."
f"\nThis might be for a number of reasons:"
f"\n a) Rarity is preventing combinations from being generated (See "
f"https://github.com/torrinworx/Blend_My_NFTs#notes-on-rarity-and-weighted-variants).\n "
f"\n b) Logic is preventing combinations from being generated (See "
f"https://github.com/torrinworx/Blend_My_NFTs#logic).\n "
f"\n c) The number of possible combinations of your NFT collection is too low. Add more Variants or "
f"Attributes to increase the recommended collection size.\n "
f"\n{TextColors.RESET}")
f"\nThis might be for a number of reasons:"
f"\n a) Rarity is preventing combinations from being generated (See "
f"https://github.com/torrinworx/Blend_My_NFTs#notes-on-rarity-and-weighted-variants).\n "
f"\n b) Logic is preventing combinations from being generated (See "
f"https://github.com/torrinworx/Blend_My_NFTs#logic).\n "
f"\n c) The number of possible combinations of your NFT collection is too low. Add more Variants or "
f"Attributes to increase the recommended collection size.\n "
f"\n{TextColors.RESET}"
)
# ======== LOADING ANIMATION ======== #
@ -495,3 +515,39 @@ class Loader:
def __exit__(self, exc_type, exc_value, tb):
# handle exceptions with those variables ^
self.stop()
def activate_logging():
    """
    (Re)configure root logging for BMNFTs at operator runtime.

    Used as an intermediate activated at runtime of the following operators:
    CreateData, ExportNFTs, ResumeFailedBatch, RefactorBatches, and
    ExportSettings. Must be independent of the 'input' class to be safe; reads
    its settings (log_path, enable_debug) directly from bpy.

    Side effects: removes any stream/file handlers left on the root logger by
    a previous activation, installs a file handler (BMNFTs_Log.txt, appended,
    in log_path or the system temp dir when log_path is unset) plus a stdout
    handler sharing the same formatter, and sets the root level to DEBUG or
    INFO depending on the enable_debug setting.
    """
    log_path = bpy.context.scene.input_tool.log_path
    log_dir = log_path if log_path else tempfile.gettempdir()
    file_handler = logging.FileHandler(os.path.join(log_dir, 'BMNFTs_Log.txt'), 'a')

    formatter = logging.Formatter(
        '[%(asctime)s] [%(levelname)s] [%(filename)s > %(funcName)s() > Line:%(lineno)d]\n%(message)s\n'
    )
    file_handler.setFormatter(formatter)

    log = logging.getLogger()

    # Drop handlers installed by a previous activation so repeated operator
    # runs don't duplicate every log line. FileHandler subclasses
    # StreamHandler, so one isinstance check covers both (the previous code
    # checked each class separately and removed file handlers twice).
    for handler in log.handlers[:]:
        if isinstance(handler, logging.StreamHandler):
            log.removeHandler(handler)

    log.addHandler(file_handler)

    # Mirror the log to the console, with the same formatter so console lines
    # carry the timestamp/level/location context the file gets.
    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setFormatter(formatter)
    log.addHandler(console_handler)

    log.setLevel(logging.DEBUG if bpy.context.scene.input_tool.enable_debug else logging.INFO)

Wyświetl plik

@ -1,9 +1,14 @@
import logging
import bpy
import json
from main import dna_generator, exporter
# TODO: migrate this code to the exporter.py to simplify render process into one file.
log = logging.getLogger(__name__)
# TODO: migrate this code to the dna_generator.py(send_to_record) and exporter.py(render_and_save) to simplify render
# process into one file.
def send_to_record(input, reverse_order=False):
@ -12,7 +17,10 @@ def send_to_record(input, reverse_order=False):
input.logic_file = json.load(open(input.logic_file))
if input.enable_logic_json and not input.logic_file:
print({'ERROR'}, f"No Logic.json file path set. Please set the file path to your Logic.json file.")
log.error(
f"No Logic.json file path set. Please set the file path to your Logic.json file."
)
raise
if not input.enable_logic_json:
scn = bpy.context.scene
@ -29,7 +37,6 @@ def send_to_record(input, reverse_order=False):
"IF": item_list1.split(','),
rule_type: item_list2.split(',')
}
print(rule_type)
num += 1
else:
input.logic_file = {}
@ -42,8 +49,6 @@ def send_to_record(input, reverse_order=False):
"IF": item_list1.split(','),
rule_type: item_list2.split(',')
}
print(rule_type)
num += 1
dna_generator.send_to_record(
@ -58,6 +63,7 @@ def send_to_record(input, reverse_order=False):
input.blend_my_nfts_output,
input.batch_json_save_path,
input.enable_debug,
input.log_path
)
@ -68,19 +74,21 @@ def render_and_save_nfts(input, reverse_order=False):
for i in range(scn.custom_metadata_fields_index, -1, -1):
item = scn.custom_metadata_fields[i]
if item.field_name in list(input.custom_fields.keys()):
raise ValueError(
f"A duplicate of '{item.field_name}' was found. Please ensure all Custom Metadata field Names "
f"are unique."
log.error(
f"A duplicate of '{item.field_name}' was found. Ensure all Custom Metadata field "
f"Names are unique."
)
raise ValueError()
else:
input.custom_fields[item.field_name] = item.field_value
else:
for item in scn.custom_metadata_fields:
if item.field_name in list(input.custom_fields.keys()):
raise ValueError(
f"A duplicate of '{item.field_name}' was found. Please ensure all Custom Metadata field Names "
f"are unique."
log.error(
f"A duplicate of '{item.field_name}' was found. Ensure all Custom Metadata field "
f"Names are unique."
)
raise ValueError()
else:
input.custom_fields[item.field_name] = item.field_value

Wyświetl plik

@ -3,10 +3,14 @@
# dna_generator.py
import random
import logging
import traceback
import collections
from .helpers import TextColors
log = logging.getLogger(__name__)
def reconstruct_dna(deconstructed_dna):
reconstructed_dna = ""
@ -116,22 +120,26 @@ def apply_rules_to_dna(hierarchy, deconstructed_dna, if_dict, result_dict, resul
elif not if_zero_bool:
variant_num = random.choices(number_list_of_i, weights=rarity_list_of_i, k=1)
except IndexError:
raise IndexError(
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Attribute collection '{a}'. For more information on "
f"Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Attribute collection '{a}'. For more information on "
f"Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
raise IndexError()
else:
try:
variant_num = random.choices(number_list_of_i, k=1)
except IndexError:
raise IndexError(
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Attribute collection '{a}'. For more information on "
f"Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Attribute collection '{a}'. For more information on "
f"Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
raise IndexError()
deconstructed_dna[int(attribute_index)] = str(variant_num[0])
return deconstructed_dna
@ -261,7 +269,6 @@ def logicafy_dna_single(hierarchy, single_dna, logic_file, enable_rarity):
did_reconstruct = False
for rule in logic_file:
# Items from 'IF' key for a given rule
print(logic_file)
if_dict = create_dicts(hierarchy, logic_file[rule]["IF"], "IF")
result_dict_type = ""
@ -282,7 +289,7 @@ def logicafy_dna_single(hierarchy, single_dna, logic_file, enable_rarity):
result_dict_type,
)
if violates_rule:
# print(f"======={deconstructed_dna} VIOLATES RULE======")
log.debug(f"======={deconstructed_dna} VIOLATES RULE======")
deconstructed_dna = apply_rules_to_dna(
hierarchy,

Wyświetl plik

@ -5,8 +5,12 @@
import json
import random
import logging
import traceback
from .helpers import TextColors
log = logging.getLogger(__name__)
def select_material(material_list, variant, enable_rarity):
"""Selects a material from a passed material list. """
@ -39,22 +43,26 @@ def select_material(material_list, variant, enable_rarity):
elif not if_zero_bool:
selected_material = random.choices(material_list_of_i, weights=rarity_list_of_i, k=1)
except IndexError:
raise IndexError(
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Material List of the Variant collection '{variant}'. For more "
f"information on Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Material List of the Variant collection '{variant}'. For more "
f"information on Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
raise IndexError()
else:
try:
selected_material = random.choices(material_list_of_i, k=1)
except IndexError:
raise IndexError(
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Material List of the Variant collection '{variant}'. For more "
f"information on Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
log.error(
f"\n{traceback.format_exc()}"
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Material List of the Variant collection '{variant}'. For more "
f"information on Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
raise IndexError()
return selected_material[0], material_list

Wyświetl plik

@ -4,9 +4,12 @@
import os
import json
import shutil
import logging
from .helpers import remove_file_by_extension
log = logging.getLogger(__name__)
def reformat_nft_collection(refactor_panel_input):
complete_coll_path = os.path.join(refactor_panel_input.save_path, "Blend_My_NFTs Output", "Complete_Collection")
@ -41,6 +44,6 @@ def reformat_nft_collection(refactor_panel_input):
with open(os.path.join(complete_coll_path, "collection_info.json"), 'w') as outfile:
outfile.write(collection_info + '\n')
print(f"All NFT files stored and sorted to the Complete_Collection folder in {refactor_panel_input.save_path}")
log.info(f"All NFT files stored and sorted to the Complete_Collection folder in {refactor_panel_input.save_path}")
shutil.rmtree(refactor_panel_input.nft_batch_save_path)