Trying to implement logging for debugging

pull/141/head
Torrin Leonard 2022-08-25 09:41:22 -04:00
parent de079b94ef
commit 1c26cea5fc
6 changed files with 72 additions and 53 deletions

View file

@@ -160,7 +160,9 @@ class BMNFTData:
sender_from: str
email_password: str
receiver_to: str
enable_debug: bool
log_path: str
custom_fields: dict = None
fail_state: Any = False
@@ -225,7 +227,9 @@ def getBMNFTData():
sender_from=bpy.context.scene.input_tool.sender_from,
email_password=bpy.context.scene.input_tool.email_password,
receiver_to=bpy.context.scene.input_tool.receiver_to,
enable_debug=bpy.context.scene.input_tool.enable_debug
enable_debug=bpy.context.scene.input_tool.enable_debug,
log_path=bpy.context.scene.input_tool.log_path,
)
return data
@@ -540,6 +544,13 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup):
description="Allows you to run Blend_My_NFTs without generating any content files and includes more "
"console information."
)
log_path: bpy.props.StringProperty(
name="Debug Log Path",
description="Path where BMNFT_Log.txt is located.",
default="",
maxlen=1024,
subtype="FILE_PATH"
)
# API Panel properties:
api_key: bpy.props.StringProperty(
@@ -669,6 +680,7 @@ class ResumeFailedBatch(bpy.types.Operator):
receiver_to=render_settings["receiver_to"],
enable_debug=render_settings["enable_debug"],
log_path=render_settings["log_path"],
fail_state=_fail_state,
failed_batch=_failed_batch,
@@ -1069,6 +1081,8 @@ class BMNFTS_PT_Other(bpy.types.Panel):
row = layout.row()
row.prop(input_tool_scene, "enable_debug")
if bpy.context.scene.input_tool.enable_debug:
row.prop(input_tool_scene, "log_path")
row = layout.row()
@@ -1080,8 +1094,12 @@
icon='URL').url = "https://github.com/torrinworx/Blend_My_NFTs"
row = layout.row()
row.operator("wm.url_open", text="YouTube Tutorials",
icon='URL').url = "https://www.youtube.com/watch?v=ygKJYz4BjRs&list=PLuVvzaanutXcYtWmPVKu2bx83EYNxLRsX"
row.operator(
"wm.url_open",
text="YouTube Tutorials",
icon='URL'
).url = "https://www.youtube.com/watch?v=ygKJYz4BjRs&list=PLuVvzaanutXcYtWmPVKu2bx83EYNxLRsX"
row = layout.row()
row.operator("wm.url_open", text="Join Our Discord Community!",
icon='URL').url = "https://discord.gg/UpZt5Un57t"

View file

@@ -5,11 +5,19 @@ import os
import time
import json
import random
import logging
import traceback
from functools import partial
from . import logic, material_generator, helpers
from .helpers import TextColors
logging.basicConfig(
level=logging.INFO,
format='[%(levelname)s][%(asctime)s]\n%(message)s\n',
datefmt='%Y-%m-%d %H:%M:%S'
)
def generate_nft_dna(
collection_size,
@@ -18,7 +26,6 @@ def generate_nft_dna(
logic_file,
enable_materials,
materials_file,
enable_debug
):
"""
Returns batchDataDictionary containing the number of NFT combinations, hierarchy, and the dna_list.
@@ -95,7 +102,7 @@ def generate_nft_dna(
single_dna = ''.join(single_dna.split('-', 1))
return single_dna
def singleCompleteDNA():
def single_complete_dna():
"""
This function applies Rarity and Logic to a single DNA created by createDNASingle() if Rarity or Logic is specified
"""
@@ -103,21 +110,25 @@ def generate_nft_dna(
single_dna = ""
if not enable_rarity:
single_dna = create_dna_random(hierarchy)
# print("============")
# print(f"Original DNA: {single_dna}")
logging.debug(f"============\nOriginal DNA: {single_dna}")
print("============")
print(f"Original DNA: {single_dna}")
if enable_rarity:
single_dna = create_dna_rarity(hierarchy)
# print(f"Rarity DNA: {single_dna}")
logging.debug(f"Rarity DNA: {single_dna}")
print(f"Rarity DNA: {single_dna}")
if enable_logic:
single_dna = logic.logicafy_dna_single(hierarchy, single_dna, logic_file, enable_rarity)
# print(f"Logic DNA: {single_dna}")
logging.debug(f"Logic DNA: {single_dna}")
print(f"Logic DNA: {single_dna}")
if enable_materials:
single_dna = material_generator.apply_materials(hierarchy, single_dna, materials_file, enable_rarity)
# print(f"Materials DNA: {single_dna}")
# print("============\n")
logging.debug(f"Materials DNA: {single_dna}\n============\n")
print(f"Materials DNA: {single_dna}")
print("============\n")
return single_dna
@@ -129,7 +140,7 @@ def generate_nft_dna(
dna_set_return = set()
for i in range(collection_size):
dna_push_to_list = partial(singleCompleteDNA)
dna_push_to_list = partial(single_complete_dna)
dna_set_return |= {''.join([dna_push_to_list()]) for _ in range(collection_size - len(dna_set_return))}
@@ -151,8 +162,6 @@ def generate_nft_dna(
dna_list = create_dna_list()
# Messages:
helpers.raise_warning_collection_size(dna_list, collection_size)
# Data stored in batchDataDictionary:
@@ -234,7 +243,8 @@ def send_to_record(
materials_file,
blend_my_nfts_output,
batch_json_save_path,
enable_debug
enable_debug,
log_path
):
"""
Creates NFTRecord.json file and sends "batch_data_dictionary" to it. NFTRecord.json is a permanent record of all DNA
@@ -243,6 +253,12 @@ def send_to_record(
repeat DNA.
"""
if enable_debug:
logging.basicConfig(
filename=os.path.join(log_path, "BMNFTS_Log.txt"),
level=logging.DEBUG
)
# Checking Scene is compatible with BMNFTs:
helpers.check_scene()
@@ -280,7 +296,6 @@ def send_to_record(
logic_file,
enable_materials,
materials_file,
enable_debug,
)
nft_record_save_path = os.path.join(blend_my_nfts_output, "NFTRecord.json")
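A caveat on the logging setup in this file: logging.basicConfig configures the root logger only if it has no handlers yet, so once the module-level call above has run, the second call inside send_to_record is a no-op, and with the module-level level set to INFO the logging.debug(...) messages are filtered out regardless. (The UI property description also refers to BMNFT_Log.txt while this code writes BMNFTS_Log.txt.) A minimal sketch of one way around this, assuming Python 3.8+ (for force=True) and that log_path is a directory; configure_debug_log is a hypothetical helper, not part of this commit:

import logging
import os


def configure_debug_log(log_path, enable_debug):
    """Hypothetical helper: route debug output to a file when enable_debug is set."""
    if not enable_debug:
        return
    logging.basicConfig(
        filename=os.path.join(log_path, "BMNFTS_Log.txt"),
        level=logging.DEBUG,  # DEBUG so the logging.debug(...) calls are not dropped
        format='[%(levelname)s][%(asctime)s]\n%(message)s\n',
        datefmt='%Y-%m-%d %H:%M:%S',
        force=True,  # Python 3.8+: replace handlers installed by earlier basicConfig calls
    )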

View file

@@ -96,6 +96,7 @@ def save_generation_state(input):
"receiver_to": input.receiver_to,
"enable_debug": input.enable_debug,
"log_path": input.log_path,
"custom_fields": input.custom_fields,
},
@@ -258,9 +259,9 @@ def render_and_save_nfts(input):
except KeyError:
raise TypeError(
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
f"The Collection '{j}' appears to be missing or has been renamed. If you made any changes to "
f"your .blend file scene, ensure you re-create your NFT Data so Blend_My_NFTs can read your "
f"scene. For more information see:{TextColors.RESET}"
f"The Collection '{j}' appears to be missing or has been renamed. If you made any changes "
f"to your .blend file scene, ensure you re-create your NFT Data so Blend_My_NFTs can read "
f"your scene. For more information see:{TextColors.RESET}"
f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
@@ -354,7 +355,7 @@ def render_and_save_nfts(input):
if not os.path.exists(animation_folder):
os.makedirs(animation_folder)
if input.animation_file_format =="MP4":
if input.animation_file_format == 'MP4':
bpy.context.scene.render.filepath = animation_path
bpy.context.scene.render.image_settings.file_format = "FFMPEG"
@@ -362,7 +363,7 @@ def render_and_save_nfts(input):
bpy.context.scene.render.ffmpeg.codec = 'H264'
bpy.ops.render.render(animation=True)
elif input.animation_file_format =='PNG':
elif input.animation_file_format == 'PNG':
if not os.path.exists(animation_path):
os.makedirs(animation_path)
@@ -370,7 +371,7 @@ def render_and_save_nfts(input):
bpy.context.scene.render.image_settings.file_format = input.animation_file_format
bpy.ops.render.render(animation=True)
elif input.animation_file_format =='TIFF':
elif input.animation_file_format == 'TIFF':
if not os.path.exists(animation_path):
os.makedirs(animation_path)
@@ -421,7 +422,7 @@ def render_and_save_nfts(input):
# if obj.name in remove_objects:
# obj.select_set(False)
if input.model_file_format =='GLB':
if input.model_file_format == 'GLB':
check_failed_exists(f"{model_path}.glb")
bpy.ops.export_scene.gltf(
filepath=f"{model_path}.glb",
@@ -430,7 +431,7 @@ def render_and_save_nfts(input):
export_keep_originals=True,
use_selection=True
)
if input.model_file_format =='GLTF_SEPARATE':
if input.model_file_format == 'GLTF_SEPARATE':
check_failed_exists(f"{model_path}.gltf")
check_failed_exists(f"{model_path}.bin")
bpy.ops.export_scene.gltf(
@@ -440,7 +441,7 @@ def render_and_save_nfts(input):
export_keep_originals=True,
use_selection=True
)
if input.model_file_format =='GLTF_EMBEDDED':
if input.model_file_format == 'GLTF_EMBEDDED':
check_failed_exists(f"{model_path}.gltf")
bpy.ops.export_scene.gltf(
filepath=f"{model_path}.gltf",
@@ -449,35 +450,35 @@ def render_and_save_nfts(input):
export_keep_originals=True,
use_selection=True
)
elif input.model_file_format =='FBX':
elif input.model_file_format == 'FBX':
check_failed_exists(f"{model_path}.fbx")
bpy.ops.export_scene.fbx(
filepath=f"{model_path}.fbx",
check_existing=True,
use_selection=True
)
elif input.model_file_format =='OBJ':
elif input.model_file_format == 'OBJ':
check_failed_exists(f"{model_path}.obj")
bpy.ops.export_scene.obj(
filepath=f"{model_path}.obj",
check_existing=True,
use_selection=True,
)
elif input.model_file_format =='X3D':
elif input.model_file_format == 'X3D':
check_failed_exists(f"{model_path}.x3d")
bpy.ops.export_scene.x3d(
filepath=f"{model_path}.x3d",
check_existing=True,
use_selection=True
)
elif input.model_file_format =='STL':
elif input.model_file_format == 'STL':
check_failed_exists(f"{model_path}.stl")
bpy.ops.export_mesh.stl(
filepath=f"{model_path}.stl",
check_existing=True,
use_selection=True
)
elif input.model_file_format =='VOX':
elif input.model_file_format == 'VOX':
check_failed_exists(f"{model_path}.vox")
bpy.ops.export_vox.some_data(filepath=f"{model_path}.vox")
@@ -577,7 +578,12 @@ def render_and_save_nfts(input):
batch_info = {"Batch Render Time": batch_complete_time, "Number of NFTs generated in Batch": x - 1,
"Average time per generation": batch_complete_time / x - 1}
batch_info_folder = os.path.join(input.nft_batch_save_path, "Batch" + str(input.batch_to_generate), "batch_info.json")
batch_info_folder = os.path.join(
input.nft_batch_save_path,
"Batch" + str(input.batch_to_generate),
"batch_info.json"
)
save_batch(batch_info, batch_info_folder)
# Send Email that Batch is complete:
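One observation on the unchanged context above: "Average time per generation" is written as batch_complete_time / x - 1, which Python evaluates as (batch_complete_time / x) - 1. If the intent is the batch time divided by the number of NFTs generated, the expression needs parentheses; a short sketch of that presumed intent (not part of this commit):

# Presumed intent: average render time per generated NFT, guarding against a zero count.
nfts_generated = x - 1
average_per_nft = batch_complete_time / nfts_generated if nfts_generated else 0.0
batch_info = {
    "Batch Render Time": batch_complete_time,
    "Number of NFTs generated in Batch": nfts_generated,
    "Average time per generation": average_per_nft,
}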

Wyświetl plik

@@ -10,23 +10,6 @@ from shutil import get_terminal_size
from collections import Counter, defaultdict
# ======== ENABLE DEBUG ======== #
# This section is used for debugging, coding, or general testing purposes.
def enable_debug(enable_debug_bool):
if enable_debug_bool:
import logging
logging.basicConfig(
filename="./log.txt",
level=logging.DEBUG,
format='[%(levelname)s][%(asctime)s]\n%(message)s\n',
datefmt='%Y-%m-%d %H:%M:%S'
)
# ======== CONSTANTS ======== #
# Constants are used for storing or updating constant values that may need to be changed depending on system
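The removed enable_debug() helper wrote its log to "./log.txt", relative to whatever the current working directory happens to be; the commit replaces it with module-level configuration in the DNA generator. An alternative pattern, sketched here as an assumption rather than as what the project does, is a named logger with a single FileHandler, which sidesteps the root-logger reconfiguration issue noted above; get_bmnfts_logger is a hypothetical helper:

import logging
import os


def get_bmnfts_logger(log_dir):
    """Hypothetical helper: a named logger with one file handler."""
    logger = logging.getLogger("blend_my_nfts")
    if not logger.handlers:  # avoid stacking handlers on repeated calls
        handler = logging.FileHandler(os.path.join(log_dir, "BMNFTS_Log.txt"))
        handler.setFormatter(logging.Formatter(
            '[%(levelname)s][%(asctime)s]\n%(message)s\n',
            datefmt='%Y-%m-%d %H:%M:%S',
        ))
        logger.addHandler(handler)
        logger.setLevel(logging.DEBUG)
    return logger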

View file

@@ -29,7 +29,6 @@ def send_to_record(input, reverse_order=False):
"IF": item_list1.split(','),
rule_type: item_list2.split(',')
}
print(rule_type)
num += 1
else:
input.logic_file = {}
@@ -42,8 +41,6 @@ def send_to_record(input, reverse_order=False):
"IF": item_list1.split(','),
rule_type: item_list2.split(',')
}
print(rule_type)
num += 1
dna_generator.send_to_record(
@@ -58,6 +55,7 @@ def send_to_record(input, reverse_order=False):
input.blend_my_nfts_output,
input.batch_json_save_path,
input.enable_debug,
input.log_path
)

View file

@@ -261,7 +261,6 @@ def logicafy_dna_single(hierarchy, single_dna, logic_file, enable_rarity):
did_reconstruct = False
for rule in logic_file:
# Items from 'IF' key for a given rule
print(logic_file)
if_dict = create_dicts(hierarchy, logic_file[rule]["IF"], "IF")
result_dict_type = ""