Merge pull request #135 from torrinworx/main

Merging latest main branch changes to Logic_v2
pull/136/head
Torrin Leonard 2022-08-10 22:22:39 -04:00 committed by GitHub
commit fe41ea36f5
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
9 changed files with 837 additions and 1073 deletions

View file

@ -1,15 +1,15 @@
bl_info = {
"name": "Blend_My_NFTs",
"author": "Torrin Leonard, This Cozy Studio Inc",
"version": (4, 4, 0),
"version": (4, 0, 2),
"blender": (3, 2, 0),
"location": "View3D",
"description": "A free and opensource Blender add-on that enables you to create thousands of unique images, animations, and 3D models.",
"description": "An open source, free to use Blender add-on that enables you to create thousands of unique images, animations, and 3D models.",
"category": "Development",
}
BMNFTS_VERSION = "v4.4.0"
LAST_UPDATED = "2:25PM, June 18th, 2022"
BMNFTS_VERSION = "v4.0.2"
LAST_UPDATED = "8:19AM, May 31st, 2022"
# ======== Import handling ======== #
@ -23,9 +23,10 @@ import os
import sys
import json
import importlib
from datetime import datetime, timezone
from dataclasses import dataclass
from typing import Any
# "a little hacky bs" - Matthew TheBrochacho ;)
# "a little hacky bs" - matt159 ;)
sys.path.append(os.path.dirname(os.path.realpath(__file__)))
from main import \
@ -34,6 +35,7 @@ from main import \
Exporter, \
get_combinations, \
HeadlessUtil, \
Intermediate, \
loading_animation, \
Logic, \
Material_Generator, \
@ -53,6 +55,7 @@ if "bpy" in locals():
"get_combinations": get_combinations,
"HeadlessUtil": HeadlessUtil,
"loading_animation": loading_animation,
"Intermediate": Intermediate,
"Logic": Logic,
"Material_Generator": Material_Generator,
"Metadata": Metadata,
@ -71,8 +74,101 @@ if "bpy" in locals():
# Used for updating text and buttons in UI panels
combinations: int = 0
recommended_limit: int = 0
dt = datetime.now(timezone.utc).astimezone() # Date Time in UTC local
@dataclass
class BMNFTData:
nftName: str
save_path: str
nftsPerBatch: int
batchToGenerate: int
collectionSize: int
Blend_My_NFTs_Output: str
batch_json_save_path: str
nftBatch_save_path: str
enableImages: bool
imageFileFormat: str
enableAnimations: bool
animationFileFormat: str
enableModelsBlender: bool
modelFileFormat: str
enableCustomFields: bool
cardanoMetaDataBool: bool
solanaMetaDataBool: bool
erc721MetaData: bool
cardano_description: str
solana_description: str
erc721_description: str
enableMaterials: bool
materialsFile: str
enableLogic: bool
enable_Logic_Json: bool
logicFile: str
enableRarity: bool
custom_Fields: dict = None
fail_state: Any = False
failed_batch: Any = None
failed_dna: Any = None
failed_dna_index: Any = None
def __post_init__(self):
self.custom_Fields = {}
def getBMNFTData():
_save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path)
_Blend_My_NFTs_Output, _batch_json_save_path, _nftBatch_save_path = make_directories(_save_path)
data = BMNFTData (
nftName = bpy.context.scene.input_tool.nftName,
save_path = _save_path,
nftsPerBatch = bpy.context.scene.input_tool.nftsPerBatch,
batchToGenerate = bpy.context.scene.input_tool.batchToGenerate,
collectionSize = bpy.context.scene.input_tool.collectionSize,
enableRarity = bpy.context.scene.input_tool.enableRarity,
Blend_My_NFTs_Output = _Blend_My_NFTs_Output,
batch_json_save_path = _batch_json_save_path,
nftBatch_save_path = _nftBatch_save_path,
enableLogic = bpy.context.scene.input_tool.enableLogic,
enable_Logic_Json = bpy.context.scene.input_tool.enable_Logic_Json,
logicFile = bpy.context.scene.input_tool.logicFile,
enableImages = bpy.context.scene.input_tool.imageBool,
imageFileFormat = bpy.context.scene.input_tool.imageEnum,
enableAnimations = bpy.context.scene.input_tool.animationBool,
animationFileFormat = bpy.context.scene.input_tool.animationEnum,
enableModelsBlender = bpy.context.scene.input_tool.modelBool,
modelFileFormat = bpy.context.scene.input_tool.modelEnum,
enableCustomFields = bpy.context.scene.input_tool.enableCustomFields,
cardanoMetaDataBool = bpy.context.scene.input_tool.cardanoMetaDataBool,
solanaMetaDataBool = bpy.context.scene.input_tool.solanaMetaDataBool,
erc721MetaData = bpy.context.scene.input_tool.erc721MetaData,
cardano_description = bpy.context.scene.input_tool.cardano_description,
solana_description = bpy.context.scene.input_tool.solana_description,
erc721_description = bpy.context.scene.input_tool.erc721_description,
enableMaterials = bpy.context.scene.input_tool.enableMaterials,
materialsFile = bpy.path.abspath(bpy.context.scene.input_tool.materialsFile)
)
return data
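For reference, a minimal sketch of constructing BMNFTData directly, bypassing the Blender UI; every value below is a hypothetical placeholder rather than an add-on default:

# Hypothetical example: building BMNFTData by hand (e.g. for a quick test).
# In the add-on these values come from bpy.context.scene.input_tool via getBMNFTData().
data = BMNFTData(
    nftName="ExampleCollection",
    save_path="/tmp/bmnfts",
    nftsPerBatch=10,
    batchToGenerate=1,
    collectionSize=100,
    Blend_My_NFTs_Output="/tmp/bmnfts/output",      # placeholder paths; normally
    batch_json_save_path="/tmp/bmnfts/batch_json",  # returned by make_directories()
    nftBatch_save_path="/tmp/bmnfts/nft_batches",
    enableImages=True,
    imageFileFormat="PNG",
    enableAnimations=False,
    animationFileFormat="MP4",
    enableModelsBlender=False,
    modelFileFormat="GLB",
    enableCustomFields=False,
    cardanoMetaDataBool=False,
    solanaMetaDataBool=False,
    erc721MetaData=True,
    cardano_description="",
    solana_description="",
    erc721_description="An example description",
    enableMaterials=False,
    materialsFile="",
    enableLogic=False,
    enable_Logic_Json=False,
    logicFile="",
    enableRarity=False,
)
# custom_Fields is initialised to {} by __post_init__, and the fail-state fields
# keep their no-fail defaults (fail_state=False, failed_batch/dna/index=None).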
@persistent
def Refresh_UI(dummy1, dummy2):
@ -126,32 +222,43 @@ def runAsHeadless():
"""
For use when running from the command line.
"""
# force CUDA device usage with cycles renderer
cprefs = bpy.context.preferences.addons['cycles'].preferences
cprefs.compute_device_type = 'CUDA'
cprefs.get_devices()
print(cprefs.devices.keys())
for key in cprefs.devices.keys():
cprefs.devices[key].use = True
print('Using {} devices for rendering!'.format(cprefs.get_num_gpu_devices()))
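The block above assumes a CUDA-capable GPU; on a machine without one, assigning 'CUDA' to compute_device_type will typically fail. A minimal defensive sketch (not part of this PR) that falls back to CPU rendering:

# Hypothetical fallback: enable CUDA devices when available, otherwise stay on CPU.
cprefs = bpy.context.preferences.addons['cycles'].preferences
try:
    cprefs.compute_device_type = 'CUDA'
    cprefs.get_devices()
    for key in cprefs.devices.keys():
        cprefs.devices[key].use = True
    print('Using {} devices for rendering!'.format(cprefs.get_num_gpu_devices()))
except TypeError:
    # Assumption: Blender rejects 'CUDA' with a TypeError when that enum item is unavailable.
    print('CUDA not available on this machine, rendering on CPU.')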
def dumpSettings(settings):
output = (
f"nftName={settings.nftName}\n"
f"collectionSize={str(settings.collectionSize)}\n"
f"nftsPerBatch={str(settings.nftsPerBatch)}\n"
f"save_path={settings.save_path}\n"
f"enableRarity={(settings.enableRarity)}\n"
f"enableLogic={str(settings.enableLogic)}\n"
f"imageBool={str(settings.imageBool)}\n"
f"imageEnum={settings.imageEnum}\n"
f"animationBool={str(settings.animationBool)}\n"
f"animationEnum={settings.animationEnum}\n"
f"modelBool={str(settings.modelBool)}\n"
f"modelEnum={settings.modelEnum}\n"
f"batchToGenerate={str(settings.batchToGenerate)}\n"
f"cardanoMetaDataBool={str(settings.cardanoMetaDataBool)}\n"
f"cardano_description={settings.cardano_description}\n"
f"erc721MetaData={str(settings.erc721MetaData)}\n"
f"erc721_description={settings.erc721_description}\n"
f"solanaMetaDataBool={str(settings.solanaMetaDataBool)}\n"
f"solana_description={settings.solana_description}\n"
f"enableCustomFields={str(settings.enableCustomFields)}\n"
f"customfieldsFile={settings.customfieldsFile}\n"
f"enableMaterials={str(settings.customfieldsFile)}\n"
f"materialsFile={settings.materialsFile}\n"
f"nftName={ settings.nftName }\n"
f"collectionSize={ str(settings.collectionSize) }\n"
f"nftsPerBatch={ str(settings.nftsPerBatch) }\n"
f"save_path={ settings.save_path }\n"
f"enableRarity={ (settings.enableRarity) }\n"
f"enableLogic={ str(settings.enableLogic) }\n"
f"imageBool={ str(settings.imageBool) }\n"
f"imageEnum={ settings.imageEnum }\n"
f"animationBool={ str(settings.animationBool) }\n"
f"animationEnum={ settings.animationEnum }\n"
f"modelBool={ str(settings.modelBool) }\n"
f"modelEnum={ settings.modelEnum }\n"
f"batchToGenerate={ str(settings.batchToGenerate) }\n"
f"cardanoMetaDataBool={ str(settings.cardanoMetaDataBool) }\n"
f"cardano_description={ settings.cardano_description }\n"
f"erc721MetaData={ str(settings.erc721MetaData) }\n"
f"erc721_description={ settings.erc721_description }\n"
f"solanaMetaDataBool={ str(settings.solanaMetaDataBool) }\n"
f"solana_description={ settings.solana_description }\n"
f"enableCustomFields={ str(settings.enableCustomFields) }\n"
f"customfieldsFile={ settings.customfieldsFile }\n"
f"enableMaterials={ str(settings.customfieldsFile) }\n"
f"materialsFile={ settings.materialsFile }\n"
)
print(output)
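The key=value lines printed here (and written to a config file by export_settings further down) are what runAsHeadless later splits into the pairs list indexed positionally as pairs[n][1]. A minimal parsing sketch, assuming the config path is held in a hypothetical config_path variable:

# Hypothetical sketch: read the exported config into the pairs list used below.
with open(config_path, "r") as f:
    lines = [line.strip() for line in f]
# Skip blank lines and '#' comments, split each remaining line on the first '='.
pairs = [line.split("=", 1) for line in lines if line and not line.startswith("#")]
# pairs[0][1] is then nftName, pairs[1][1] collectionSize, and so on.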
@ -168,29 +275,31 @@ def runAsHeadless():
# print(pairs)
settings.nftName = pairs[0][1]
settings.collectionSize = int(pairs[1][1])
settings.nftsPerBatch = int(pairs[2][1])
settings.save_path = pairs[3][1]
settings.enableRarity = pairs[4][1] == 'True'
settings.enableLogic = pairs[5][1] == 'True'
settings.imageBool = pairs[6][1] == 'True'
settings.imageEnum = pairs[7][1]
settings.animationBool = pairs[8][1] == 'True'
settings.animationEnum = pairs[9][1]
settings.modelBool = pairs[10][1] == 'True'
settings.modelEnum = pairs[11][1]
settings.batchToGenerate = int(pairs[12][1])
settings.cardanoMetaDataBool = pairs[13][1] == 'True'
settings.cardano_description = pairs[14][1]
settings.erc721MetaData = pairs[15][1] == 'True'
settings.erc721_description = pairs[16][1]
settings.solanaMetaDataBool = pairs[17][1] == 'True'
settings.solanaDescription = pairs[18][1]
settings.enableCustomFields = pairs[19][1] == 'True'
settings.customfieldsFile = pairs[20][1]
settings.enableMaterials = pairs[21][1] == 'True'
settings.materialsFile = pairs[22][1]
settings.nftName = pairs[0][1]
settings.collectionSize = int(pairs[1][1])
settings.nftsPerBatch = int(pairs[2][1])
settings.save_path = pairs[3][1]
settings.enableRarity = pairs[4][1] == 'True'
settings.enableLogic = pairs[5][1] == 'True'
settings.enableLogicJson = pairs[6][1] == 'True'
settings.logicFile = pairs[7][1]
settings.imageBool = pairs[8][1] == 'True'
settings.imageEnum = pairs[9][1]
settings.animationBool = pairs[10][1] == 'True'
settings.animationEnum = pairs[11][1]
settings.modelBool = pairs[12][1] == 'True'
settings.modelEnum = pairs[13][1]
settings.batchToGenerate = int(pairs[14][1])
settings.cardanoMetaDataBool = pairs[15][1] == 'True'
settings.cardano_description = pairs[16][1]
settings.erc721MetaData = pairs[17][1] == 'True'
settings.erc721_description = pairs[18][1]
settings.solanaMetaDataBool = pairs[19][1] == 'True'
settings.solanaDescription = pairs[20][1]
settings.enableCustomFields = pairs[21][1] == 'True'
settings.customfieldsFile = pairs[22][1]
settings.enableMaterials = pairs[23][1] == 'True'
settings.materialsFile = pairs[24][1]
if args.save_path:
settings.save_path = args.save_path
@ -198,80 +307,19 @@ def runAsHeadless():
if args.batch_number:
settings.batchToGenerate = args.batch_number
# dumpSettings(settings)
input = getBMNFTData()
if args.batch_data_path:
input.batch_json_save_path = args.batch_data_path
# don't mind me, just copy-pasting code around...
if args.operation == 'create-dna':
nftName = settings.nftName
collectionSize = settings.collectionSize
nftsPerBatch = settings.nftsPerBatch
save_path = bpy.path.abspath(settings.save_path)
logicFile = bpy.path.abspath(settings.logicFile)
enableRarity = settings.enableRarity
enableLogic = settings.enableLogic
enableMaterials = settings.enableMaterials
materialsFile = settings.materialsFile
Blend_My_NFTs_Output, batch_json_save_path, nftBatch_save_path = make_directories(save_path)
DNA_Generator.send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, enableLogic, logicFile, enableMaterials,
materialsFile, Blend_My_NFTs_Output, batch_json_save_path)
Intermediate.send_To_Record_JSON(input)
elif args.operation == 'generate-nfts':
nftName = settings.nftName
save_path = bpy.path.abspath(settings.save_path)
batchToGenerate = settings.batchToGenerate
collectionSize = settings.collectionSize
Intermediate.render_and_save_NFTs(input)
Blend_My_NFTs_Output, batch_json_save_path, nftBatch_save_path = make_directories(save_path)
if args.batch_data_path:
batch_json_save_path = args.batch_data_path;
enableImages = settings.imageBool
imageFileFormat = settings.imageEnum
enableAnimations = settings.animationBool
animationFileFormat = settings.animationEnum
enableModelsBlender = settings.modelBool
modelFileFormat = settings.modelEnum
enableMaterials = settings.enableMaterials
materialsFile = settings.materialsFile
# fail state variables, set to no fail due to resume_failed_batch() Operator in BMNFTS_PT_GenerateNFTs Panel
fail_state = False
failed_batch = None
failed_dna = None
failed_dna_index = None
Exporter.render_and_save_NFTs(nftName, collectionSize, batchToGenerate, batch_json_save_path,
nftBatch_save_path, enableImages,
imageFileFormat, enableAnimations, animationFileFormat, enableModelsBlender,
modelFileFormat, fail_state, failed_batch, failed_dna, failed_dna_index,
enableMaterials, materialsFile
)
elif args.operation == 'refactor-batches':
class refactorData:
save_path = bpy.path.abspath(settings.save_path)
custom_Fields_File = bpy.path.abspath(settings.customfieldsFile)
enableCustomFields = settings.enableCustomFields
cardanoMetaDataBool = settings.cardanoMetaDataBool
solanaMetaDataBool = settings.solanaMetaDataBool
erc721MetaData = settings.erc721MetaData
cardano_description = settings.cardano_description
solana_description = settings.solana_description
erc721_description = settings.erc721_description
Blend_My_NFTs_Output, batch_json_save_path, nftBatch_save_path = make_directories(save_path)
Refactorer.reformatNFTCollection(refactorData)
Refactorer.reformatNFTCollection(input)
# ======== User input Property Group ======== #
@ -378,21 +426,6 @@ class BMNFTS_PGT_Input_Properties(bpy.types.PropertyGroup):
)
# Other Panel:
enableAutoSave: bpy.props.BoolProperty(name="Auto Save Before Generation", description="Automatically saves your Blender file when 'Generate NFTs & Create Metadata' button is clicked")
# Auto Shutdown:
enableAutoShutdown: bpy.props.BoolProperty(name="Auto Shutdown", description="Automatically shuts down your computer after a Batch is finished Generating")
specify_timeBool: bpy.props.BoolProperty(name="Shutdown in a Given Amount of Time", description="Wait a given amount of time after a Batch is generated before Automatic Shutdown")
hours: bpy.props.IntProperty(default=0, min=0)
minutes: bpy.props.IntProperty(default=0, min=0)
# Send Batch Complete Email:
emailNotificationBool: bpy.props.BoolProperty(name="Email Notifications", description="Receive Email Notifications from Blender once a batch is finished generating")
sender_from: bpy.props.StringProperty(name="From", default="from@example.com")
email_password: bpy.props.StringProperty(name="Password", subtype='PASSWORD')
receiver_to: bpy.props.StringProperty(name="To", default="to@example.com")
# API Panel properties:
apiKey: bpy.props.StringProperty(name="API Key", subtype='PASSWORD')
@ -410,73 +443,15 @@ class createData(bpy.types.Operator):
name="Reverse Order")
def execute(self, context):
nftName = bpy.context.scene.input_tool.nftName
collectionSize = bpy.context.scene.input_tool.collectionSize
nftsPerBatch = bpy.context.scene.input_tool.nftsPerBatch
save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path)
enableRarity = bpy.context.scene.input_tool.enableRarity
enableLogic = bpy.context.scene.input_tool.enableLogic
enable_Logic_Json = bpy.context.scene.input_tool.enable_Logic_Json
logicFile = bpy.path.abspath(bpy.context.scene.input_tool.logicFile)
enableMaterials = bpy.context.scene.input_tool.enableMaterials
materialsFile = bpy.path.abspath(bpy.context.scene.input_tool.materialsFile)
# Handling Custom Fields UIList input:
if enableLogic:
if enable_Logic_Json and logicFile:
logicFile = json.load(open(logicFile))
input = getBMNFTData()
Blend_My_NFTs_Output, batch_json_save_path, nftBatch_save_path = make_directories(save_path)
DNA_Generator.send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, enableLogic, logicFile, enableMaterials,
materialsFile, Blend_My_NFTs_Output, batch_json_save_path)
if enable_Logic_Json and not logicFile:
if input.enableLogic:
if input.enable_Logic_Json and not input.logicFile:
self.report({'ERROR'}, f"No Logic.json file path set. Please set the file path to your Logic.json file.")
if not enable_Logic_Json:
scn = context.scene
if self.reverse_order:
logicFile = {}
num = 1
for i in range(scn.logic_fields_index, -1, -1):
item = scn.logic_fields[i]
item_list1 = item.item_list1
rule_type = item.rule_type
item_list2 = item.item_list2
logicFile[f"Rule-{num}"] = {
"Items-1": item_list1.split(','),
"Rule-Type": rule_type,
"Items-2": item_list2.split(',')
}
num += 1
Blend_My_NFTs_Output, batch_json_save_path, nftBatch_save_path = make_directories(save_path)
DNA_Generator.send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, enableLogic, logicFile, enableMaterials,
materialsFile, Blend_My_NFTs_Output, batch_json_save_path)
else:
logicFile = {}
num = 1
for item in scn.logic_fields:
item_list1 = item.item_list1
rule_type = item.rule_type
item_list2 = item.item_list2
logicFile[f"Rule-{num}"] = {
"Items-1": item_list1.split(','),
"Rule-Type": rule_type,
"Items-2": item_list2.split(',')
}
num += 1
Blend_My_NFTs_Output, batch_json_save_path, nftBatch_save_path = make_directories(save_path)
DNA_Generator.send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, enableLogic, logicFile, enableMaterials,
materialsFile, Blend_My_NFTs_Output, batch_json_save_path)
if not enableLogic:
Blend_My_NFTs_Output, batch_json_save_path, nftBatch_save_path = make_directories(save_path)
DNA_Generator.send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, enableLogic, logicFile, enableMaterials,
materialsFile, Blend_My_NFTs_Output, batch_json_save_path)
Intermediate.send_To_Record_JSON(input)
self.report({'INFO'}, f"NFT Data created!")
return {"FINISHED"}
@ -496,76 +471,10 @@ class exportNFTs(bpy.types.Operator):
def execute(self, context):
enableAutoSave = bpy.context.scene.input_tool.enableAutoSave
if enableAutoSave:
bpy.ops.wm.save_mainfile()
class input:
nftName = bpy.context.scene.input_tool.nftName
save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path)
batchToGenerate = bpy.context.scene.input_tool.batchToGenerate
collectionSize = bpy.context.scene.input_tool.collectionSize
Blend_My_NFTs_Output, batch_json_save_path, nftBatch_save_path = make_directories(save_path)
enableImages = bpy.context.scene.input_tool.imageBool
imageFileFormat = bpy.context.scene.input_tool.imageEnum
enableAnimations = bpy.context.scene.input_tool.animationBool
animationFileFormat = bpy.context.scene.input_tool.animationEnum
enableModelsBlender = bpy.context.scene.input_tool.modelBool
modelFileFormat = bpy.context.scene.input_tool.modelEnum
enableCustomFields = bpy.context.scene.input_tool.enableCustomFields
custom_Fields = {}
cardanoMetaDataBool = bpy.context.scene.input_tool.cardanoMetaDataBool
solanaMetaDataBool = bpy.context.scene.input_tool.solanaMetaDataBool
erc721MetaData = bpy.context.scene.input_tool.erc721MetaData
cardano_description = bpy.context.scene.input_tool.cardano_description
solana_description = bpy.context.scene.input_tool.solana_description
erc721_description = bpy.context.scene.input_tool.erc721_description
enableMaterials = bpy.context.scene.input_tool.enableMaterials
materialsFile = bpy.path.abspath(bpy.context.scene.input_tool.materialsFile)
enableAutoShutdown = bpy.context.scene.input_tool.enableAutoShutdown
specify_timeBool = bpy.context.scene.input_tool.specify_timeBool
hours = bpy.context.scene.input_tool.hours
minutes = bpy.context.scene.input_tool.minutes
emailNotificationBool = bpy.context.scene.input_tool.emailNotificationBool
sender_from = bpy.context.scene.input_tool.sender_from
email_password = bpy.context.scene.input_tool.email_password
receiver_to = bpy.context.scene.input_tool.receiver_to
# fail state variables, set to no fail due to resume_failed_batch() Operator in BMNFTS_PT_GenerateNFTs Panel
fail_state = False
failed_batch = None
failed_dna = None
failed_dna_index = None
input = getBMNFTData()
# Handling Custom Fields UIList input:
if input.enableCustomFields:
scn = context.scene
if self.reverse_order:
for i in range(scn.custom_metadata_fields_index, -1, -1):
item = scn.custom_metadata_fields[i]
if item.field_name in list(input.custom_Fields.keys()):
raise ValueError(f"A duplicate of '{item.field_name}' was found. Please ensure all Custom Metadata field Names are unique.")
else:
input.custom_Fields[item.field_name] = item.field_value
else:
for item in scn.custom_metadata_fields:
if item.field_name in list(input.custom_Fields.keys()):
raise ValueError(f"A duplicate of '{item.field_name}' was found. Please ensure all Custom Metadata field Names are unique.")
else:
input.custom_Fields[item.field_name] = item.field_value
Exporter.render_and_save_NFTs(input)
Intermediate.render_and_save_NFTs(input)
self.report({'INFO'}, f"All NFTs generated for batch {input.batchToGenerate}!")
@ -579,42 +488,53 @@ class resume_failed_batch(bpy.types.Operator):
bl_options = {"REGISTER", "UNDO"}
def execute(self, context):
class input:
save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path)
batchToGenerate = bpy.context.scene.input_tool.batchToGenerate
_save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path)
_Blend_My_NFTs_Output, _batch_json_save_path, _nftBatch_save_path = make_directories(_save_path)
Blend_My_NFTs_Output, batch_json_save_path, nftBatch_save_path = make_directories(save_path)
file_name = os.path.join(batch_json_save_path, "Batch{}.json".format(batchToGenerate))
batch = json.load(open(file_name))
_batchToGenerate = bpy.context.scene.input_tool.batchToGenerate
nftName = batch["Generation Save"][-1]["Render_Settings"]["nftName"]
collectionSize = batch["Generation Save"][-1]["Render_Settings"]["collectionSize"]
nftBatch_save_path = batch["Generation Save"][-1]["Render_Settings"]["nftBatch_save_path"]
file_name = os.path.join(_batch_json_save_path, "Batch{}.json".format(_batchToGenerate))
batchData = json.load(open(file_name))
enableImages = batch["Generation Save"][-1]["Render_Settings"]["enableImages"]
imageFileFormat = batch["Generation Save"][-1]["Render_Settings"]["imageFileFormat"]
_fail_state, _failed_batch, _failed_dna, _failed_dna_index = Checks.check_FailedBatches(_batch_json_save_path)
enableAnimations = batch["Generation Save"][-1]["Render_Settings"]["enableAnimations"]
animationFileFormat = batch["Generation Save"][-1]["Render_Settings"]["animationFileFormat"]
input = BMNFTData (
nftName = batchData["Generation Save"][-1]["Render_Settings"]["nftName"],
save_path = _save_path,
collectionSize = batchData["Generation Save"][-1]["Render_Settings"]["collectionSize"],
enableModelsBlender = batch["Generation Save"][-1]["Render_Settings"]["enableModelsBlender"]
modelFileFormat = batch["Generation Save"][-1]["Render_Settings"]["modelFileFormat"]
Blend_My_NFTs_Output = _Blend_My_NFTs_Output,
batch_json_save_path = _batch_json_save_path,
nftBatch_save_path = batchData["Generation Save"][-1]["Render_Settings"]["nftBatch_save_path"],
enableCustomFields = batch["Generation Save"][-1]["Render_Settings"]["enableCustomFields"]
custom_Fields = batch["Generation Save"][-1]["Render_Settings"]["custom_Fields"]
enableImages = batchData["Generation Save"][-1]["Render_Settings"]["enableImages"],
imageFileFormat = batchData["Generation Save"][-1]["Render_Settings"]["imageFileFormat"],
cardanoMetaDataBool = batch["Generation Save"][-1]["Render_Settings"]["cardanoMetaDataBool"]
solanaMetaDataBool = batch["Generation Save"][-1]["Render_Settings"]["solanaMetaDataBool"]
erc721MetaData = batch["Generation Save"][-1]["Render_Settings"]["erc721MetaData"]
enableAnimations = batchData["Generation Save"][-1]["Render_Settings"]["enableAnimations"],
animationFileFormat = batchData["Generation Save"][-1]["Render_Settings"]["animationFileFormat"],
cardano_description = batch["Generation Save"][-1]["Render_Settings"]["cardano_description"]
solana_description = batch["Generation Save"][-1]["Render_Settings"]["solana_description"]
erc721_description = batch["Generation Save"][-1]["Render_Settings"]["erc721_description"]
enableModelsBlender = batchData["Generation Save"][-1]["Render_Settings"]["enableModelsBlender"],
modelFileFormat = batchData["Generation Save"][-1]["Render_Settings"]["modelFileFormat"],
enableMaterials = batch["Generation Save"][-1]["Render_Settings"]["enableMaterials"]
materialsFile = batch["Generation Save"][-1]["Render_Settings"]["materialsFile"]
enableCustomFields = batchData["Generation Save"][-1]["Render_Settings"]["enableCustomFields"],
custom_Fields = batchData["Generation Save"][-1]["Render_Settings"]["custom_Fields"],
fail_state, failed_batch, failed_dna, failed_dna_index = Checks.check_FailedBatches(batch_json_save_path)
cardanoMetaDataBool = batchData["Generation Save"][-1]["Render_Settings"]["cardanoMetaDataBool"],
solanaMetaDataBool = batchData["Generation Save"][-1]["Render_Settings"]["solanaMetaDataBool"],
erc721MetaData = batchData["Generation Save"][-1]["Render_Settings"]["erc721MetaData"],
cardano_description = batchData["Generation Save"][-1]["Render_Settings"]["cardano_description"],
solana_description = batchData["Generation Save"][-1]["Render_Settings"]["solana_description"],
erc721_description = batchData["Generation Save"][-1]["Render_Settings"]["erc721_description"],
enableMaterials = batchData["Generation Save"][-1]["Render_Settings"]["enableMaterials"],
materialsFile = batchData["Generation Save"][-1]["Render_Settings"]["materialsFile"],
fail_state = _fail_state,
failed_batch = _failed_batch,
failed_dna = _failed_dna,
failed_dna_index = _failed_dna_index
)
Exporter.render_and_save_NFTs(input)
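The Batch#.json layout read here keeps each run's settings under the last entry of "Generation Save"; a minimal inspection sketch outside the operator, with a placeholder path:

# Hypothetical sketch: inspect the Render_Settings saved for a batch.
import json
import os

batch_file = os.path.join("/tmp/bmnfts/batch_json", "Batch1.json")  # placeholder path
with open(batch_file) as f:
    batchData = json.load(f)

render_settings = batchData["Generation Save"][-1]["Render_Settings"]
print(render_settings["nftName"], render_settings["collectionSize"])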
@ -635,25 +555,8 @@ class refactor_Batches(bpy.types.Operator):
name="Reverse Order")
def execute(self, context):
class input:
save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path)
enableCustomFields = bpy.context.scene.input_tool.enableCustomFields
custom_Fields = {}
cardanoMetaDataBool = bpy.context.scene.input_tool.cardanoMetaDataBool
solanaMetaDataBool = bpy.context.scene.input_tool.solanaMetaDataBool
erc721MetaData = bpy.context.scene.input_tool.erc721MetaData
cardano_description = bpy.context.scene.input_tool.cardano_description
solana_description = bpy.context.scene.input_tool.solana_description
erc721_description = bpy.context.scene.input_tool.erc721_description
Blend_My_NFTs_Output, batch_json_save_path, nftBatch_save_path = make_directories(save_path)
# Passing info to main functions for refactoring:
Refactorer.reformatNFTCollection(input)
Refactorer.reformatNFTCollection(getBMNFTData())
return {"FINISHED"}
def invoke(self, context, event):
@ -679,49 +582,51 @@ class export_settings(bpy.types.Operator):
"#when running Blend_My_NFTs in a headless environment.\n"
"\n"
"#The name of your nft project\n"
f"nftName={settings.nftName}\n"
f"nftName={ settings.nftName }\n"
"\n"
"#NFT Collection Size\n"
f"collectionSize={settings.collectionSize}\n"
f"collectionSize={ settings.collectionSize }\n"
"\n"
"#The number of NFTs to generate per batch\n"
f"nftsPerBatch={str(settings.nftsPerBatch)}\n"
f"nftsPerBatch={ str(settings.nftsPerBatch) }\n"
"\n"
"#Save path for your NFT files\n"
f"save_path={settings.save_path}\n"
f"save_path={ settings.save_path }\n"
"\n"
"#Enable Rarity\n"
f"enableRarity={(settings.enableRarity)}\n"
f"enableRarity={ (settings.enableRarity) }\n"
"\n"
"#Enable Logic\n"
f"enableLogic={str(settings.enableLogic)}\n"
f"enableLogic={ str(settings.enableLogic) }\n"
f"enableLogicJson={ str(settings.enable_Logic_Json) }\n"
f"logicFilePath={ settings.logicFile }\n"
"\n"
"#NFT Media output type(s):\n"
f"imageBool={str(settings.imageBool)}\n"
f"imageEnum={settings.imageEnum}\n"
f"animationBool={str(settings.animationBool)}\n"
f"animationEnum={settings.animationEnum}\n"
f"modelBool={str(settings.modelBool)}\n"
f"modelEnum={settings.modelEnum}\n"
f"imageBool={ str(settings.imageBool) }\n"
f"imageEnum={ settings.imageEnum }\n"
f"animationBool={ str(settings.animationBool) }\n"
f"animationEnum={ settings.animationEnum }\n"
f"modelBool={ str(settings.modelBool) }\n"
f"modelEnum={ settings.modelEnum }\n"
"\n"
"#Batch to generate\n"
f"batchToGenerate={str(settings.batchToGenerate)}\n"
f"batchToGenerate={ str(settings.batchToGenerate) }\n"
"\n"
"#Metadata Format\n"
f"cardanoMetaDataBool={str(settings.cardanoMetaDataBool)}\n"
f"cardano_description={settings.cardano_description}\n"
f"erc721MetaData={str(settings.erc721MetaData)}\n"
f"erc721_description={settings.erc721_description}\n"
f"solanaMetaDataBool={str(settings.solanaMetaDataBool)}\n"
f"solana_description={settings.solana_description}\n"
f"cardanoMetaDataBool={ str(settings.cardanoMetaDataBool) }\n"
f"cardano_description={ settings.cardano_description }\n"
f"erc721MetaData={ str(settings.erc721MetaData) }\n"
f"erc721_description={ settings.erc721_description }\n"
f"solanaMetaDataBool={ str(settings.solanaMetaDataBool) }\n"
f"solana_description={ settings.solana_description }\n"
"\n"
"#Enable Custom Fields\n"
f"enableCustomFields={str(settings.enableCustomFields)}\n"
f"customfieldsFile={settings.customfieldsFile}\n"
f"enableCustomFields={ str(settings.enableCustomFields) }\n"
f"customfieldsFile={ settings.customfieldsFile }\n"
"\n"
"#Enable Materials\n"
f"enableMaterials={str(settings.enableMaterials)}\n"
f"materialsFile={settings.materialsFile}\n"
f"enableMaterials={ str(settings.enableMaterials) }\n"
f"materialsFile={ settings.materialsFile }\n"
)
print(output, file=config)
@ -956,57 +861,10 @@ class BMNFTS_PT_Other(bpy.types.Panel):
input_tool_scene = scene.input_tool
"""
Other:
A place to store miscellaneous settings, features, and external links that the user may find useful but that shouldn't
get in the way of their workflow.
Export Settings:
This panel gives the user the option to export all settings from the Blend_My_NFTs addon into a config file. Settings
will be read from the config file when running headlessly.
"""
row = layout.row()
row.prop(input_tool_scene, "enableAutoSave")
# Auto Shutdown:
row = layout.row()
row.prop(input_tool_scene, "enableAutoShutdown")
row.label(text="*Must Run Blender as Admin")
if bpy.context.scene.input_tool.enableAutoShutdown:
row = layout.row()
row.prop(input_tool_scene, "specify_timeBool")
time_row1 = layout.row()
time_row1.label(text=f"Hours")
time_row1.prop(input_tool_scene, "hours", text="")
time_row2 = layout.row()
time_row2.label(text=f"Minutes")
time_row2.prop(input_tool_scene, "minutes", text="")
if not bpy.context.scene.input_tool.specify_timeBool:
time_row1.enabled = False
time_row2.enabled = False
else:
time_row1.enabled = True
time_row2.enabled = True
layout.separator()
row = layout.row()
row.prop(input_tool_scene, "emailNotificationBool")
row.label(text="*Windows 10+ only")
if bpy.context.scene.input_tool.emailNotificationBool:
row = layout.row()
row.prop(input_tool_scene, "sender_from")
row = layout.row()
row.prop(input_tool_scene, "email_password")
layout.separator()
row = layout.row()
row.prop(input_tool_scene, "receiver_to")
layout.separator()
layout.label(text=f"Running Blend_My_NFTs Headless:")
save_path = bpy.path.abspath(bpy.context.scene.input_tool.save_path)

View file

@ -2,8 +2,7 @@
# This file is for storing or updating constant values that may need to be changed depending on system requirements and
# different use cases.
import os
import json
import platform
removeList = [".gitignore", ".DS_Store", "desktop.ini", ".ini"]
@ -35,18 +34,3 @@ class bcolors:
ERROR = '\033[91m' # RED
RESET = '\033[0m' # RESET COLOR
def save_result(result):
"""
Saves json result to json file at the specified path.
"""
file_name = "log.json"
if platform.system() == "Linux" or platform.system() == "Darwin":
path = os.path.join(os.path.join(os.path.expanduser('~')), 'Desktop', file_name)
if platform.system() == "Windows":
path = os.path.join(os.environ["HOMEPATH"], "Desktop", file_name)
data = json.dumps(result, indent=1, ensure_ascii=True)
with open(path, 'w') as outfile:
outfile.write(data + '\n')

View file

@ -15,354 +15,375 @@ from .Constants import bcolors, removeList, remove_file_by_extension
def get_hierarchy():
"""
Returns the hierarchy of a given Blender scene.
"""
"""
Returns the hierarchy of a given Blender scene.
"""
coll = bpy.context.scene.collection
coll = bpy.context.scene.collection
scriptIgnoreCollection = bpy.data.collections["Script_Ignore"]
scriptIgnoreCollection = bpy.data.collections["Script_Ignore"]
listAllCollInScene = []
listAllCollections = []
listAllCollInScene = []
listAllCollections = []
def traverse_tree(t):
yield t
for child in t.children:
yield from traverse_tree(child)
def traverse_tree(t):
yield t
for child in t.children:
yield from traverse_tree(child)
for c in traverse_tree(coll):
listAllCollInScene.append(c)
for c in traverse_tree(coll):
listAllCollInScene.append(c)
for i in listAllCollInScene:
listAllCollections.append(i.name)
for i in listAllCollInScene:
listAllCollections.append(i.name)
listAllCollections.remove(scriptIgnoreCollection.name)
listAllCollections.remove(scriptIgnoreCollection.name)
if "Scene Collection" in listAllCollections:
listAllCollections.remove("Scene Collection")
if "Scene Collection" in listAllCollections:
listAllCollections.remove("Scene Collection")
if "Master Collection" in listAllCollections:
listAllCollections.remove("Master Collection")
if "Master Collection" in listAllCollections:
listAllCollections.remove("Master Collection")
def allScriptIgnore(scriptIgnoreCollection):
# Removes all collections and sub-collections in the Script_Ignore collection from listAllCollections.
def allScriptIgnore(scriptIgnoreCollection):
# Removes all collections and sub-collections in the Script_Ignore collection from listAllCollections.
for coll in list(scriptIgnoreCollection.children):
listAllCollections.remove(coll.name)
listColl = list(coll.children)
if len(listColl) > 0:
allScriptIgnore(coll)
for coll in list(scriptIgnoreCollection.children):
listAllCollections.remove(coll.name)
listColl = list(coll.children)
if len(listColl) > 0:
allScriptIgnore(coll)
allScriptIgnore(scriptIgnoreCollection)
listAllCollections.sort()
allScriptIgnore(scriptIgnoreCollection)
listAllCollections.sort()
exclude = ["_"] # Excluding characters that identify a Variant
attributeCollections = copy.deepcopy(listAllCollections)
exclude = ["_"] # Excluding characters that identify a Variant
attributeCollections = copy.deepcopy(listAllCollections)
def filter_num():
"""
This function removes items from 'attributeCollections' if they include values from the 'exclude' variable.
It removes child collections from the parent collections in the "listAllCollections" list.
"""
for x in attributeCollections:
if any(a in x for a in exclude):
attributeCollections.remove(x)
def filter_num():
"""
This function removes items from 'attributeCollections' if they include values from the 'exclude' variable.
It removes child collections from the parent collections in the "listAllCollections" list.
"""
for x in attributeCollections:
if any(a in x for a in exclude):
attributeCollections.remove(x)
for i in range(len(listAllCollections)):
filter_num()
for i in range(len(listAllCollections)):
filter_num()
attributeVariants = [x for x in listAllCollections if x not in attributeCollections]
attributeCollections1 = copy.deepcopy(attributeCollections)
attributeVariants = [x for x in listAllCollections if x not in attributeCollections]
attributeCollections1 = copy.deepcopy(attributeCollections)
def attributeData(attributeVariants):
"""
def attributeData(attributeVariants):
"""
Creates a dictionary of each attribute
"""
allAttDataList = {}
for i in attributeVariants:
# Check if name follows naming conventions:
if int(i.count("_")) > 2 and int(i.split("_")[1]) > 0:
raise Exception(
f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
f"There is a naming issue with the following Attribute/Variant: '{i}'\n"
f"Review the naming convention of Attribute and Variant collections here:\n{bcolors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
allAttDataList = {}
for i in attributeVariants:
# Check if name follows naming conventions:
if i.count("_") > 2:
raise Exception(
f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
f"There is a naming issue with the following Attribute/Variant: '{i}'\n"
f"Review the naming convention of Attribute and Variant collections here:\n{bcolors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
try:
number = i.split("_")[1]
name = i.split("_")[0]
rarity = i.split("_")[2]
except IndexError:
raise Exception(
f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
f"There is a naming issue with the following Attribute/Variant: '{i}'\n"
f"Review the naming convention of Attribute and Variant collections here:\n{bcolors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
def getName(i):
"""
Returns the name of "i" attribute variant
"""
allAttDataList[i] = {"name": name, "number": number, "rarity": rarity}
return allAttDataList
name = i.split("_")[0]
variantMetaData = attributeData(attributeVariants)
return name
hierarchy = {}
for i in attributeCollections1:
colParLong = list(bpy.data.collections[str(i)].children)
colParShort = {}
for x in colParLong:
colParShort[x.name] = None
hierarchy[i] = colParShort
def getOrder_rarity(i):
"""
Returns the "order" and "rarity" (if enabled) of i attribute variant in a list
"""
x = re.sub(r'[a-zA-Z]', "", i)
a = x.split("_")
del a[0]
return list(a)
for a in hierarchy:
for b in hierarchy[a]:
for x in variantMetaData:
if str(x) == str(b):
(hierarchy[a])[b] = variantMetaData[x]
name = getName(i)
orderRarity = getOrder_rarity(i)
return hierarchy
try:
number = orderRarity[0]
except:
raise Exception(
f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
f"There is a naming issue with the following Attribute/Variant: '{i}'\n"
f"Review the naming convention of Attribute and Variant collections here:\n{bcolors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
try:
rarity = orderRarity[1]
except:
raise Exception(
f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
f"There is a naming issue with the following Attribute/Variant: '{i}'\n"
f"Review the naming convention of Attribute and Variant collections here:\n{bcolors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
eachObject = {"name": name, "number": number, "rarity": rarity}
allAttDataList[i] = eachObject
return allAttDataList
variantMetaData = attributeData(attributeVariants)
hierarchy = {}
for i in attributeCollections1:
colParLong = list(bpy.data.collections[str(i)].children)
colParShort = {}
for x in colParLong:
colParShort[x.name] = None
hierarchy[i] = colParShort
for a in hierarchy:
for b in hierarchy[a]:
for x in variantMetaData:
if str(x) == str(b):
(hierarchy[a])[b] = variantMetaData[x]
return hierarchy
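The hierarchy returned above maps each attribute collection to its variant collections, each annotated with the name, number, and rarity parsed from the Name_Number_Rarity naming convention; a hypothetical example for a single attribute:

# Example shape of the hierarchy dict (collection names are placeholders):
hierarchy = {
    "Hat": {
        "Hat_1_25": {"name": "Hat", "number": "1", "rarity": "25"},
        "Hat_2_75": {"name": "Hat", "number": "2", "rarity": "75"},
    }
}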
def generateNFT_DNA(collectionSize, enableRarity, enableLogic, logicFile, enableMaterials, materialsFile):
"""
"""
Returns batchDataDictionary containing the number of NFT combinations, hierarchy, and the DNAList.
"""
hierarchy = get_hierarchy()
hierarchy = get_hierarchy()
# DNA random, Rarity and Logic methods:
DataDictionary = {}
# DNA random, Rarity and Logic methods:
DataDictionary = {}
def createDNArandom(hierarchy):
"""Creates a single DNA randomly without Rarity or Logic."""
dnaStr = ""
dnaStrList = []
listOptionVariant = []
def createDNArandom():
"""Creates a single DNA randomly without Rarity or Logic."""
dnaStr = ""
dnaStrList = []
listOptionVariant = []
for i in hierarchy:
numChild = len(hierarchy[i])
possibleNums = list(range(1, numChild + 1))
listOptionVariant.append(possibleNums)
for i in hierarchy:
numChild = len(hierarchy[i])
possibleNums = list(range(1, numChild + 1))
listOptionVariant.append(possibleNums)
for i in listOptionVariant:
randomVariantNum = random.choices(i, k=1)
str1 = ''.join(str(e) for e in randomVariantNum)
dnaStrList.append(str1)
for i in listOptionVariant:
randomVariantNum = random.choices(i, k=1)
str1 = ''.join(str(e) for e in randomVariantNum)
dnaStrList.append(str1)
for i in dnaStrList:
num = "-" + str(i)
dnaStr += num
for i in dnaStrList:
num = "-" + str(i)
dnaStr += num
dna = ''.join(dnaStr.split('-', 1))
dna = ''.join(dnaStr.split('-', 1))
return str(dna)
return str(dna)
def singleCompleteDNA():
"""
This function applies Rarity and Logic to a single DNA created by createDNArandom() if Rarity or Logic is specified
"""
def singleCompleteDNA():
"""This function applies Rarity and Logic to a single DNA created by createDNASingle() if Rarity or Logic specified"""
singleDNA = ""
# Comments for debugging random, rarity, logic, and materials.
if not enableRarity:
singleDNA = createDNArandom()
# print("============")
if enableRarity:
singleDNA = Rarity.createDNArarity(hierarchy)
# print(f"Rarity DNA: {singleDNA}")
singleDNA = ""
# Comments for debugging random, rarity, logic, and materials.
if not enableRarity:
singleDNA = createDNArandom(hierarchy)
# print("============")
# print(f"Original DNA: {singleDNA}")
if enableRarity:
singleDNA = Rarity.createDNArarity(hierarchy)
# print(f"Rarity DNA: {singleDNA}")
if enableLogic:
singleDNA = Logic.logicafyDNAsingle(hierarchy, singleDNA, logicFile)
# print(f"Original DNA: {singleDNA}")
# print("============\n")
if enableLogic:
singleDNA = Logic.logicafyDNAsingle(hierarchy, singleDNA, logicFile, enableRarity, enableMaterials)
# print(f"Logic DNA: {singleDNA}")
if enableMaterials:
singleDNA = Material_Generator.apply_materials(hierarchy, singleDNA, materialsFile)
# print(f"Materials DNA: {singleDNA}")
# print("============\n")
if enableMaterials:
singleDNA = Material_Generator.apply_materials(hierarchy, singleDNA, materialsFile, enableRarity)
# print(f"Materials DNA: {singleDNA}")
return singleDNA
# print("============\n")
def create_DNAList():
"""Creates DNAList. Loops through createDNARandom() and applies Rarity, and Logic while checking if all DNA are unique"""
DNASetReturn = set()
return singleDNA
for i in range(collectionSize):
dnaPushToList = partial(singleCompleteDNA)
def create_DNAList():
"""Creates DNAList. Loops through createDNARandom() and applies Rarity, and Logic while checking if all DNA are unique"""
DNASetReturn = set()
DNASetReturn |= {''.join([dnaPushToList()]) for _ in range(collectionSize - len(DNASetReturn))}
for i in range(collectionSize):
dnaPushToList = partial(singleCompleteDNA)
DNAListUnformatted = list(DNASetReturn)
DNASetReturn |= {''.join([dnaPushToList()]) for _ in range(collectionSize - len(DNASetReturn))}
DNAListFormatted = []
DNA_Counter = 1
for i in DNAListUnformatted:
DNAListFormatted.append({
i: {
"Complete": False,
"Order_Num": DNA_Counter
}
})
DNAListUnformatted = list(DNASetReturn)
DNA_Counter += 1
DNAListFormatted = []
DNA_Counter = 1
for i in DNAListUnformatted:
DNAListFormatted.append({
i: {
"Complete": False,
"Order_Num": DNA_Counter
}
})
return DNAListFormatted
DNA_Counter += 1
DNAList = create_DNAList()
return DNAListFormatted
# Messages:
DNAList = create_DNAList()
Checks.raise_Warning_collectionSize(DNAList, collectionSize)
# Messages:
Checks.raise_Warning_collectionSize(DNAList, collectionSize)
# Data stored in batchDataDictionary:
DataDictionary["numNFTsGenerated"] = len(DNAList)
DataDictionary["hierarchy"] = hierarchy
DataDictionary["DNAList"] = DNAList
return DataDictionary
# Data stored in batchDataDictionary:
DataDictionary["numNFTsGenerated"] = len(DNAList)
DataDictionary["hierarchy"] = hierarchy
DataDictionary["DNAList"] = DNAList
return DataDictionary
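Putting the pieces together, the DataDictionary written to NFTRecord.json has the following shape; the DNA strings and counts are hypothetical:

# Example NFTRecord.json / DataDictionary layout produced by generateNFT_DNA():
DataDictionary = {
    "numNFTsGenerated": 3,
    "hierarchy": hierarchy,  # as returned by get_hierarchy()
    "DNAList": [
        {"1-1": {"Complete": False, "Order_Num": 1}},
        {"2-1": {"Complete": False, "Order_Num": 2}},
        {"1-2": {"Complete": False, "Order_Num": 3}},
    ],
}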
def makeBatches(collectionSize, nftsPerBatch, save_path, batch_json_save_path):
"""
"""
Sorts through all the batches and outputs a given number of batches depending on collectionSize and nftsPerBatch.
These files are then saved as Batch#.json files to batch_json_save_path
"""
# Clears the Batch Data folder of Batches:
batchList = os.listdir(batch_json_save_path)
if batchList:
for i in batchList:
batch = os.path.join(batch_json_save_path, i)
if os.path.exists(batch):
os.remove(
os.path.join(batch_json_save_path, i)
)
# Clears the Batch Data folder of Batches:
batchList = os.listdir(batch_json_save_path)
if batchList:
for i in batchList:
batch = os.path.join(batch_json_save_path, i)
if os.path.exists(batch):
os.remove(
os.path.join(batch_json_save_path, i)
)
Blend_My_NFTs_Output = os.path.join(save_path, "Blend_My_NFTs Output", "NFT_Data")
NFTRecord_save_path = os.path.join(Blend_My_NFTs_Output, "NFTRecord.json")
DataDictionary = json.load(open(NFTRecord_save_path))
Blend_My_NFTs_Output = os.path.join(save_path, "Blend_My_NFTs Output", "NFT_Data")
NFTRecord_save_path = os.path.join(Blend_My_NFTs_Output, "NFTRecord.json")
DataDictionary = json.load(open(NFTRecord_save_path))
numNFTsGenerated = DataDictionary["numNFTsGenerated"]
hierarchy = DataDictionary["hierarchy"]
DNAList = DataDictionary["DNAList"]
numNFTsGenerated = DataDictionary["numNFTsGenerated"]
hierarchy = DataDictionary["hierarchy"]
DNAList = DataDictionary["DNAList"]
numBatches = collectionSize // nftsPerBatch
remainder_dna = collectionSize % nftsPerBatch
if remainder_dna > 0:
numBatches += 1
numBatches = collectionSize // nftsPerBatch
remainder_dna = collectionSize % nftsPerBatch
if remainder_dna > 0:
numBatches += 1
print(f"To generate batches of {nftsPerBatch} DNA sequences per batch, with a total of {numNFTsGenerated}"
f" possible NFT DNA sequences, the number of batches generated will be {numBatches}")
print(f"To generate batches of {nftsPerBatch} DNA sequences per batch, with a total of {numNFTsGenerated}"
f" possible NFT DNA sequences, the number of batches generated will be {numBatches}")
batches_dna_list = []
batches_dna_list = []
for i in range(numBatches):
BatchDNAList = []
if i != range(numBatches)[-1]:
BatchDNAList = list(DNAList[0:nftsPerBatch])
batches_dna_list.append(BatchDNAList)
for i in range(numBatches):
BatchDNAList = []
if i != range(numBatches)[-1]:
BatchDNAList = list(DNAList[0:nftsPerBatch])
batches_dna_list.append(BatchDNAList)
DNAList = [x for x in DNAList if x not in BatchDNAList]
else:
BatchDNAList = DNAList
DNAList = [x for x in DNAList if x not in BatchDNAList]
else:
BatchDNAList = DNAList
batchDictionary = {
"NFTs_in_Batch": int(len(BatchDNAList)),
"hierarchy": hierarchy,
"BatchDNAList": BatchDNAList
}
batchDictionary = {
"NFTs_in_Batch": int(len(BatchDNAList)),
"hierarchy": hierarchy,
"BatchDNAList": BatchDNAList
}
batchDictionary = json.dumps(batchDictionary, indent=1, ensure_ascii=True)
with open(os.path.join(batch_json_save_path, f"Batch{i + 1}.json"), "w") as outfile:
outfile.write(batchDictionary)
batchDictionary = json.dumps(batchDictionary, indent=1, ensure_ascii=True)
with open(os.path.join(batch_json_save_path, f"Batch{i + 1}.json"), "w") as outfile:
outfile.write(batchDictionary)
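Each Batch#.json written here therefore contains the slice of DNA assigned to that batch plus the shared hierarchy, for example (hypothetical values):

# Example Batch1.json layout written by makeBatches():
batchDictionary = {
    "NFTs_in_Batch": 2,
    "hierarchy": hierarchy,  # same hierarchy as in NFTRecord.json
    "BatchDNAList": [
        {"1-1": {"Complete": False, "Order_Num": 1}},
        {"2-1": {"Complete": False, "Order_Num": 2}},
    ],
}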
def send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, enableLogic, logicFile, enableMaterials,
materialsFile, Blend_My_NFTs_Output, batch_json_save_path):
"""
"""
Creates NFTRecord.json file and sends "batchDataDictionary" to it. NFTRecord.json is a permanent record of all DNA
you've generated with all attribute variants. If you add new variants or attributes to your .blend file, other scripts
need to reference this .json file to generate new DNA and make note of the new attributes and variants to prevent
repeated DNA.
"""
# Checking Scene is compatible with BMNFTs:
Checks.check_Scene()
# Checking Scene is compatible with BMNFTs:
Checks.check_Scene()
# Messages:
print(
f"\n========================================\n"
f"Creating NFT Data. Generating {collectionSize} NFT DNA.\n"
)
# Messages:
print(
f"\n========================================\n"
f"Creating NFT Data. Generating {collectionSize} NFT DNA.\n"
)
if not enableRarity and not enableLogic:
print(
f"{bcolors.OK}NFT DNA will be determined randomly, no special properties or parameters are applied.\n{bcolors.RESET}")
if not enableRarity and not enableLogic:
print(f"{bcolors.OK}NFT DNA will be determined randomly, no special properties or parameters are applied.\n{bcolors.RESET}")
if enableRarity:
print(f"{bcolors.OK}Rarity is ON. Weights listed in .blend scene will be taken into account.\n{bcolors.RESET}")
if enableRarity:
print(f"{bcolors.OK}Rarity is ON. Weights listed in .blend scene will be taken into account.\n{bcolors.RESET}")
if enableLogic:
print(f"{bcolors.OK}Logic is ON. {len(list(logicFile.keys()))} rules detected and applied.\n{bcolors.RESET}")
if enableLogic:
print(f"{bcolors.OK}Logic is ON. Rules listed in {logicFile} will be taken into account.\n{bcolors.RESET}")
time_start = time.time()
time_start = time.time()
def create_nft_data():
try:
DataDictionary = generateNFT_DNA(collectionSize, enableRarity, enableLogic, logicFile, enableMaterials,
materialsFile)
NFTRecord_save_path = os.path.join(Blend_My_NFTs_Output, "NFTRecord.json")
def create_nft_data():
try:
DataDictionary = generateNFT_DNA(collectionSize, enableRarity, enableLogic, logicFile, enableMaterials,
materialsFile)
NFTRecord_save_path = os.path.join(Blend_My_NFTs_Output, "NFTRecord.json")
# Checks:
# Checks:
Checks.raise_Warning_maxNFTs(nftsPerBatch, collectionSize)
Checks.check_Duplicates(DataDictionary["DNAList"])
Checks.raise_Error_ZeroCombinations()
Checks.raise_Warning_maxNFTs(nftsPerBatch, collectionSize)
Checks.check_Duplicates(DataDictionary["DNAList"])
Checks.raise_Error_ZeroCombinations()
if enableRarity:
Checks.check_Rarity(DataDictionary["hierarchy"], DataDictionary["DNAList"],
os.path.join(save_path, "Blend_My_NFTs Output/NFT_Data"))
if enableRarity:
Checks.check_Rarity(DataDictionary["hierarchy"], DataDictionary["DNAList"], os.path.join(save_path, "Blend_My_NFTs Output/NFT_Data"))
except FileNotFoundError:
raise FileNotFoundError(
f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows "
f"the naming conventions and scene structure. For more information, "
f"see:\n{bcolors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
finally:
loading.stop()
except FileNotFoundError:
raise FileNotFoundError(
f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows "
f"the naming conventions and scene structure. For more information, "
f"see:\n{bcolors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
finally:
loading.stop()
try:
ledger = json.dumps(DataDictionary, indent=1, ensure_ascii=True)
with open(NFTRecord_save_path, 'w') as outfile:
outfile.write(ledger + '\n')
try:
ledger = json.dumps(DataDictionary, indent=1, ensure_ascii=True)
with open(NFTRecord_save_path, 'w') as outfile:
outfile.write(ledger + '\n')
print(
f"\n{bcolors.OK}Blend_My_NFTs Success:\n"
f"{len(DataDictionary['DNAList'])} NFT DNA saved to {NFTRecord_save_path}. NFT DNA Successfully created.\n{bcolors.RESET}")
print(
f"\n{bcolors.OK}Blend_My_NFTs Success:\n"
f"{len(DataDictionary['DNAList'])} NFT DNA saved to {NFTRecord_save_path}. NFT DNA Successfully created.\n{bcolors.RESET}")
except:
raise (
f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows "
f"the naming conventions and scene structure. For more information, "
f"see:\n{bcolors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
except:
raise (
f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows "
f"the naming conventions and scene structure. For more information, "
f"see:\n{bcolors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
# Loading Animation:
loading = Loader(f'Creating NFT DNA...', '').start()
create_nft_data()
makeBatches(collectionSize, nftsPerBatch, save_path, batch_json_save_path)
loading.stop()
# Loading Animation:
loading = Loader(f'Creating NFT DNA...', '').start()
create_nft_data()
makeBatches(collectionSize, nftsPerBatch, save_path, batch_json_save_path)
loading.stop()
time_end = time.time()
time_end = time.time()
print(
f"{bcolors.OK}Created and saved NFT DNA in {time_end - time_start}s.\n{bcolors.RESET}"
)
print(
f"{bcolors.OK}Created and saved NFT DNA in {time_end - time_start}s.\n{bcolors.RESET}"
)

View file

@ -4,12 +4,9 @@
import bpy
import os
import ssl
import time
import json
import smtplib
import datetime
import platform
from .loading_animation import Loader
from .Constants import bcolors, removeList, remove_file_by_extension
from .Metadata import createCardanoMetadata, createSolanaMetaData, createErc721MetaData
@ -44,37 +41,37 @@ def save_generation_state(input):
"Generation Start Date and Time": [CURRENT_TIME, CURRENT_DATE, LOCAL_TIMEZONE],
"Render_Settings": {
"nftName": input.nftName,
"save_path": input.save_path,
"batchToGenerate": input.batchToGenerate,
"collectionSize": input.collectionSize,
"nftName": input.nftName,
"save_path": input.save_path,
"batchToGenerate": input.batchToGenerate,
"collectionSize": input.collectionSize,
"Blend_My_NFTs_Output": input.Blend_My_NFTs_Output,
"batch_json_save_path": input.batch_json_save_path,
"nftBatch_save_path": input.nftBatch_save_path,
"nftBatch_save_path": input.nftBatch_save_path,
"enableImages": input.enableImages,
"imageFileFormat": input.imageFileFormat,
"enableImages": input.enableImages,
"imageFileFormat": input.imageFileFormat,
"enableAnimations": input.enableAnimations,
"animationFileFormat": input.animationFileFormat,
"enableAnimations": input.enableAnimations,
"animationFileFormat": input.animationFileFormat,
"enableModelsBlender": input.enableModelsBlender,
"modelFileFormat": input.modelFileFormat,
"enableModelsBlender": input.enableModelsBlender,
"modelFileFormat": input.modelFileFormat,
"enableCustomFields": input.enableCustomFields,
"custom_Fields": input.custom_Fields,
"enableCustomFields": input.enableCustomFields,
"custom_Fields": input.custom_Fields,
"cardanoMetaDataBool": input.cardanoMetaDataBool,
"solanaMetaDataBool": input.solanaMetaDataBool,
"erc721MetaData": input.erc721MetaData,
"cardanoMetaDataBool": input.cardanoMetaDataBool,
"solanaMetaDataBool": input.solanaMetaDataBool,
"erc721MetaData": input.erc721MetaData,
"cardano_description": input.cardano_description,
"solana_description": input.solana_description,
"erc721_description": input.erc721_description,
"cardano_description": input.cardano_description,
"solana_description": input.solana_description,
"erc721_description": input.erc721_description,
"enableMaterials": input.enableMaterials,
"materialsFile": input.materialsFile,
"enableMaterials": input.enableMaterials,
"materialsFile": input.materialsFile,
},
})
@ -115,20 +112,18 @@ def render_and_save_NFTs(input):
Renders the NFT DNA in a Batch#.json, where # is renderBatch in config.py. Turns off the viewport camera and
the render camera for all items in hierarchy.
"""
print(f"\nFAILED BATCH = {input.failed_batch}\n")
print(f"\nBATCH TO GENERATE = {input.batchToGenerate}\n")
time_start_1 = time.time()
# If failed Batch is detected and user is resuming its generation:
if input.fail_state:
print(f"{bcolors.ERROR}\nResuming Failed Batch {input.failed_batch}\n{bcolors.RESET}")
NFTs_in_Batch, hierarchy, BatchDNAList = getBatchData(input.failed_batch, input.batch_json_save_path)
for a in range(input.failed_dna):
del BatchDNAList[0]
x = input.failed_dna + 1
# If user is generating the normal way:
else:
print(f"\nGenerating Batch {input.batchToGenerate}\n")
NFTs_in_Batch, hierarchy, BatchDNAList = getBatchData(input.batchToGenerate, input.batch_json_save_path)
save_generation_state(input)
x = 1
@ -182,14 +177,12 @@ def render_and_save_NFTs(input):
if hierarchy[attribute][var]['number'] == variant:
variant = var
if material != '0': # If material is not empty
if material != '0':
for variant_m in materialsFile:
if variant == variant_m:
# Getting Materials name from Materials index in the Materials List
materials_list = list(materialsFile[variant_m]["Material List"].keys())
material = materials_list[int(material) - 1] # Subtract 1 because '0' means empty mat
break
for mat in materialsFile[variant_m]["Material List"]:
if mat.split('_')[1] == material:
material = mat
full_dna_dict[variant] = material
@ -257,20 +250,20 @@ def render_and_save_NFTs(input):
time_start_2 = time.time()
# Main paths for batch subfolders:
batchFolder = os.path.join(input.nftBatch_save_path, "Batch" + str(input.batchToGenerate))
batchFolder = os.path.join(input.nftBatch_save_path, "Batch" + str(input.batchToGenerate))
imageFolder = os.path.join(batchFolder, "Images")
animationFolder = os.path.join(batchFolder, "Animations")
modelFolder = os.path.join(batchFolder, "Models")
BMNFT_metaData_Folder = os.path.join(batchFolder, "BMNFT_metadata")
imageFolder = os.path.join(batchFolder, "Images")
animationFolder = os.path.join(batchFolder, "Animations")
modelFolder = os.path.join(batchFolder, "Models")
BMNFT_metaData_Folder = os.path.join(batchFolder, "BMNFT_metadata")
imagePath = os.path.join(imageFolder, name)
animationPath = os.path.join(animationFolder, name)
modelPath = os.path.join(modelFolder, name)
imagePath = os.path.join(imageFolder, name)
animationPath = os.path.join(animationFolder, name)
modelPath = os.path.join(modelFolder, name)
cardanoMetadataPath = os.path.join(batchFolder, "Cardano_metadata")
solanaMetadataPath = os.path.join(batchFolder, "Solana_metadata")
erc721MetadataPath = os.path.join(batchFolder, "Erc721_metadata")
cardanoMetadataPath = os.path.join(batchFolder, "Cardano_metadata")
solanaMetadataPath = os.path.join(batchFolder, "Solana_metadata")
erc721MetadataPath = os.path.join(batchFolder, "Erc721_metadata")
# Generation/Rendering:
if input.enableImages:
@ -420,13 +413,13 @@ def render_and_save_NFTs(input):
if not os.path.exists(solanaMetadataPath):
os.makedirs(solanaMetadataPath)
createSolanaMetaData(name, Order_Num, full_single_dna, dnaDictionary, metadataMaterialDict, input.custom_Fields,
input.enableCustomFields, input.solana_description, solanaMetadataPath)
input.enableCustomFields, input.cardano_description, solanaMetadataPath)
if input.erc721MetaData:
if not os.path.exists(erc721MetadataPath):
os.makedirs(erc721MetadataPath)
createErc721MetaData(name, Order_Num, full_single_dna, dnaDictionary, metadataMaterialDict, input.custom_Fields,
input.enableCustomFields, input.erc721_description, erc721MetadataPath)
input.enableCustomFields, input.cardano_description, erc721MetadataPath)
if not os.path.exists(BMNFT_metaData_Folder):
os.makedirs(BMNFT_metaData_Folder)
@ -464,64 +457,3 @@ def render_and_save_NFTs(input):
batch_infoFolder = os.path.join(input.nftBatch_save_path, "Batch" + str(input.batchToGenerate), "batch_info.json")
save_batch(batch_info, batch_infoFolder)
# Send Email that Batch is complete:
if input.emailNotificationBool:
port = 465 # For SSL
smtp_server = "smtp.gmail.com"
sender_email = input.sender_from # Sender address configured in the add-on
receiver_email = input.receiver_to # Receiver address configured in the add-on
password = input.email_password
# Get batch info for message:
if input.fail_state:
batch = input.failed_batch # Report the failed batch number rather than the fail_state flag
batchData = getBatchData(input.failed_batch, input.batch_json_save_path)
else:
batchData = getBatchData(input.batchToGenerate, input.batch_json_save_path)
batch = input.batchToGenerate
generation_time = str(datetime.timedelta(seconds=batch_complete_time))
message = f"""\
Subject: Batch {batch} completed {x - 1} NFTs in {generation_time} (h:m:s)
Generation Time:
{generation_time.split(':')[0]} Hours, {generation_time.split(':')[1]} Minutes, {generation_time.split(':')[2]} Seconds
Batch Data:
{batchData}
This message was sent from an instance of the Blend_My_NFTs Blender add-on.
"""
context = ssl.create_default_context()
with smtplib.SMTP_SSL(smtp_server, port, context=context) as server:
server.login(sender_email, password)
server.sendmail(sender_email, receiver_email, message)
# Automatic Shutdown:
# If the user enables automatic shutdown but does not specify a delay after Batch completion:
def shutdown(time):
current_os = platform.system()
if current_os == "Windows":
os.system(f"shutdown /s /t {time}")
if current_os == "Darwin":
# Note: macOS's shutdown uses different flags than Windows; "-h +<minutes>" halts after a delay
# in minutes ("now" for an immediate shutdown) and typically requires elevated privileges.
os.system(f"shutdown -h +{int(time) // 60}" if time else "shutdown -h now")
if input.enableAutoShutdown and not input.specify_timeBool:
shutdown(0)
# If the user enables automatic shutdown and specifies a delay after Batch completion:
if input.enableAutoShutdown and input.specify_timeBool:
# Convert the configured delay to seconds before passing it to shutdown():
hours = int(input.hours) * 60 * 60
minutes = int(input.minutes) * 60
total_sleep_time = hours + minutes
shutdown(total_sleep_time)

View file

@ -56,5 +56,12 @@ def getPythonArgs():
required=False,
help="Use pre-existing batch data for rendering"
)
parser.add_argument("--logic-file",
dest="logic_file",
metavar='FILE',
required=False,
help="Overwrite the logic file path in the config file"
)
return (parser.parse_args(argv), parser)
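# Example invocation (illustrative only; the .blend filename, script name, and any other arguments
# are assumptions -- only --logic-file comes from the parser above):
#
#   blender -background my_scene.blend --python main.py -- --logic-file ./Logic.json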

View file

@ -0,0 +1,76 @@
import json
import bpy
from main import DNA_Generator, Exporter
def send_To_Record_JSON(input, reverse_order=False):
if input.enableLogic:
if input.enable_Logic_Json and input.logicFile:
input.logicFile = json.load(open(input.logicFile))
if input.enable_Logic_Json and not input.logicFile:
print({'ERROR'}, f"No Logic.json file path set. Please set the file path to your Logic.json file.")
if not input.enable_Logic_Json:
scn = bpy.context.scene
if reverse_order:
input.logicFile = {}
num = 1
for i in range(scn.logic_fields_index, -1, -1):
item = scn.logic_fields[i]
item_list1 = item.item_list1
rule_type = item.rule_type
item_list2 = item.item_list2
input.logicFile[f"Rule-{num}"] = {
"Items-1": item_list1.split(','),
"Rule-Type": rule_type,
"Items-2": item_list2.split(',')
}
num += 1
else:
input.logicFile = {}
num = 1
for item in scn.logic_fields:
item_list1 = item.item_list1
rule_type = item.rule_type
item_list2 = item.item_list2
input.logicFile[f"Rule-{num}"] = {
"Items-1": item_list1.split(','),
"Rule-Type": rule_type,
"Items-2": item_list2.split(',')
}
num += 1
DNA_Generator.send_To_Record_JSON( input.collectionSize,
input.nftsPerBatch,
input.save_path,
input.enableRarity,
input.enableLogic,
input.logicFile,
input.enableMaterials,
input.materialsFile,
input.Blend_My_NFTs_Output,
input.batch_json_save_path
)
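# A minimal sketch of the rule dictionary assembled above (and of the Logic.json format it mirrors);
# the attribute and variant names are made up for illustration:
#
# {
#     "Rule-1": {
#         "Items-1": ["Hats"],
#         "Rule-Type": "Never With",
#         "Items-2": ["Hair_2_25", "Hair_3_25"]
#     }
# }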
def render_and_save_NFTs(input, reverse_order=False):
if input.enableCustomFields:
scn = bpy.context.scene
if reverse_order:
for i in range(scn.custom_metadata_fields_index, -1, -1):
item = scn.custom_metadata_fields[i]
if item.field_name in list(input.custom_Fields.keys()):
raise ValueError(f"A duplicate of '{item.field_name}' was found. Please ensure all Custom Metadata field Names are unique.")
else:
input.custom_Fields[item.field_name] = item.field_value
else:
for item in scn.custom_metadata_fields:
if item.field_name in list(input.custom_Fields.keys()):
raise ValueError(f"A duplicate of '{item.field_name}' was found. Please ensure all Custom Metadata field Names are unique.")
else:
input.custom_Fields[item.field_name] = item.field_value
Exporter.render_and_save_NFTs(input)
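# Illustrative example (field names and values are made up): after the loops above,
# input.custom_Fields might look like {"Artist": "This Cozy Studio", "Season": "1"}, with a
# ValueError raised if the same field name is entered twice in the UI.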

View file

@ -2,286 +2,212 @@
# The purpose of this file is to add logic and rules to the DNA that are sent to the NFTRecord.json file in DNA_Generator.py
import bpy
import json
import random
import collections
from .Constants import bcolors, removeList, remove_file_by_extension, save_result
# Helper Functions
def isAttorVar(hierarchy, items_List):
items_returned = collections.defaultdict(list)
for i in items_List:
for j in hierarchy:
if i == j: # If i is an Attribute, add all i Variants to dictionary.
items_returned[i] = list(hierarchy[j].keys())
items_returned[i].append("Empty")
if i in list(hierarchy[j].keys()):
items_returned[j].append(i)
# Check if all variants in an attribute were included, if so, add "Empty" variant.
for i in items_returned:
if list(items_returned[i]) == list(hierarchy[i].keys()):
items_returned[i].append("Empty")
return dict(items_returned)
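# Illustrative example (attribute/variant names are assumptions): for a hierarchy whose "Eyes"
# attribute holds "Blue_1_50" and "Green_2_50":
#   isAttorVar(hierarchy, ["Eyes"])      -> {"Eyes": ["Blue_1_50", "Green_2_50", "Empty"]}
#   isAttorVar(hierarchy, ["Blue_1_50"]) -> {"Eyes": ["Blue_1_50"]}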
def getAttIndex(hierarchy, attribute):
attList = list(hierarchy.keys())
index = attList.index(attribute)
return index
def getVarNum(variant):
if variant == "Empty":
num = '0'
else:
num = variant.split("_")[1]
return num
def items_to_num(items_List):
num_List = {}
for i in items_List:
variant_num_list = []
for j in items_List[i]:
variant_num_list.append(getVarNum(j))
num_List[i] = variant_num_list
return num_List
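# Illustrative example continuing the names above: items_to_num({"Eyes": ["Blue_1_50", "Empty"]})
# returns {"Eyes": ["1", "0"]} -- each Variant is reduced to its order number via getVarNum(),
# with "Empty" mapping to "0".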
def rar_selectVar(hierarchy, items_List, deconstructed_DNA):
for attribute in items_List:
a_attribute_index = getAttIndex(hierarchy, attribute)
selected_variants = items_List[attribute]
hierarchy_selected_variants = list(hierarchy[attribute])
left_over_variants = [x for x in hierarchy_selected_variants if x not in selected_variants]
if not left_over_variants:
deconstructed_DNA[int(a_attribute_index)] = "0"
else:
number_List_Of_i = []
rarity_List_Of_i = []
ifZeroBool = None
variantNum = None
for a in left_over_variants:
number = a.split("_")[1]
rarity = a.split("_")[2]
number_List_Of_i.append(int(number))
rarity_List_Of_i.append(float(rarity))
for x in rarity_List_Of_i:
if x == 0:
ifZeroBool = True
elif x != 0:
ifZeroBool = False
if ifZeroBool:
variantNum = random.choices(number_List_Of_i, k=1)
if not ifZeroBool:
variantNum = random.choices(number_List_Of_i, weights=rarity_List_Of_i, k=1)
deconstructed_DNA[int(a_attribute_index)] = str(variantNum[0])
return deconstructed_DNA
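# Illustrative example (names are assumptions): with items_List == {"Eyes": ["Blue_1_50"]} and a
# hierarchy "Eyes" attribute that also contains "Green_2_50", the "Eyes" slot of deconstructed_DNA
# is re-rolled (rarity-weighted) to one of the left-over variants, here "2"; if every "Eyes"
# variant were listed in items_List, the slot would be set to "0" instead.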
def reconstructDNA(deconstructedDNA):
reconstructed_DNA = ""
for a in deconstructedDNA:
num = "-" + str(a)
reconstructed_DNA += num
return ''.join(reconstructed_DNA.split('-', 1))
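# Illustrative example: reconstructDNA(["1", "0", "2"]) joins the slots with '-' and strips the
# leading separator, returning "1-0-2".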
def get_var_info(variant, hierarchy):
# Get info for variant dict
name = variant.split("_")[0]
order_number = variant.split("_")[1]
rarity_number = variant.split("_")[2]
attribute = ""
def strip_empty_variant(num_list):
"""Strips empty variants if full attribute collection. Used for processing below."""
for i in num_list:
var_list = num_list[i]
if "0" in var_list:
var_list.remove("0")
num_list[i] = var_list
return num_list
for a in hierarchy:
for var in list(hierarchy[a].keys()):
if var == variant:
attribute = a
break
attribute_index = list(hierarchy.keys()).index(attribute)
# Rule Checks:
def never_with_Rule_Check(hierarchy, deconstructed_DNA, num_List1, num_List2):
"""Returns True if singleDNA violates Never with Rule stated in Logic.json."""
violates_rule = None
return [name, order_number, rarity_number, attribute, attribute_index] # list of Var info sent back
num_List1 = strip_empty_variant(num_List1)
num_List2 = strip_empty_variant(num_List2)
for a in num_List1:
for b in num_List2:
if str(deconstructed_DNA[getAttIndex(hierarchy, a)]) in num_List1[a] and \
str(deconstructed_DNA[getAttIndex(hierarchy, b)]) in num_List2[b]:
violates_rule = True
return violates_rule
else:
violates_rule = False
return violates_rule
def apply_rules_to_dna(hierarchy, deconstructed_DNA, if_dict, result_dict, result_dict_type, enableRarity):
# Check if Variants in if_dict are in deconstructed_DNA, if so return if_list_selected = True:
if_list_selected = False
for a in deconstructed_DNA:
attribute_index = deconstructed_DNA.index(a)
attribute = list(hierarchy.keys())[attribute_index]
def only_with_Rule_Check(hierarchy, deconstructed_DNA, num_List1, num_List2):
"""Returns True if singleDNA violates Only with Rule stated in Logic.json."""
violates_rule = None
for b in hierarchy[attribute]:
if hierarchy[attribute][b]["number"] == a:
a_dna_var = b
for a in num_List1:
for b in num_List2:
if str(deconstructed_DNA[getAttIndex(hierarchy, a)]) in num_List1[a] and \
str(deconstructed_DNA[getAttIndex(hierarchy, b)]) not in num_List2[b]:
violates_rule = True
return violates_rule
if attribute in if_dict:
if a_dna_var in list(if_dict[attribute].keys()):
if_list_selected = True
else:
violates_rule = False
return violates_rule
def always_with_Rule_Check(hierarchy, deconstructed_DNA, num_List1, num_List2):
"""Returns True if singleDNA violates Always with Rule stated in Logic.json."""
violates_rule = None
# Apply changes in accordance to Variants in 'result_dict' and 'if_list_selected' bool above:
for a in deconstructed_DNA:
attribute_index = deconstructed_DNA.index(a)
attribute = list(hierarchy.keys())[attribute_index]
if attribute in result_dict: # Check if Attribute from DNA is in 'result_dict'
# If 'a' is a full Attribute and Variants in if_dict not selected, set 'a' to empty (0):
if list(result_dict[attribute].keys()) == list(hierarchy[attribute].keys()) and not if_list_selected:
deconstructed_DNA[attribute_index] = "0"
# If 'a' is a full Attribute and result_dict_type = "NOT", set 'a' to empty (0):
if list(result_dict[attribute].keys()) == list(
hierarchy[attribute].keys()) and if_list_selected and result_dict_type == "NOT":
deconstructed_DNA[attribute_index] = "0"
# If Variants in if_dict are selected, set each attribute in 'result_dict' to a random or rarity selected Variant from
# 'result_dict[attribute]' variant_list:
if if_list_selected:
# Invert 'items_returned' if 'NOT' rule is selected:
if result_dict_type == "NOT":
for a in result_dict:
var_selected_list = list(result_dict[a].keys()) # list of variants from 'NOT'
att_selected_list = list(hierarchy[a].keys()) # full list of variants from hierarchy attribute
# If 'a' is not a full Attribute, invert the variants:
if len(var_selected_list) != len(att_selected_list):
var_selected_list = [i for i in att_selected_list if i not in var_selected_list]
var_selected_list_complete = {}
for i in var_selected_list:
var_selected_list_complete[i] = get_var_info(i, hierarchy)
result_dict[a] = var_selected_list_complete
for a in result_dict:
attribute_index = list(hierarchy.keys()).index(a)
attribute = list(hierarchy.keys())[attribute_index]
variant_list = list(result_dict[a].keys())
if attribute in result_dict: # Check if Attribute from DNA is in 'then_dict'
number_List_Of_i = []
rarity_List_Of_i = []
ifZeroBool = None
variantNum = None
for b in variant_list:
number = b.split("_")[1]
rarity = b.split("_")[2]
number_List_Of_i.append(int(number))
rarity_List_Of_i.append(float(rarity))
for b in rarity_List_Of_i:
if b == 0:
ifZeroBool = True
elif b != 0:
ifZeroBool = False
if enableRarity:
try:
if ifZeroBool:
variantNum = random.choices(number_List_Of_i, k=1)
elif not ifZeroBool:
variantNum = random.choices(number_List_Of_i, weights=rarity_List_Of_i, k=1)
except IndexError:
raise IndexError(
f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Attribute collection '{a}'. For more information on Blend_My_NFTs compatible scenes, "
f"see:\n{bcolors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
else:
try:
variantNum = random.choices(number_List_Of_i, k=1)
except IndexError:
raise IndexError(
f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Attribute collection '{a}'. For more information on Blend_My_NFTs compatible scenes, "
f"see:\n{bcolors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
deconstructed_DNA[int(attribute_index)] = str(variantNum[0])
return deconstructed_DNA
def get_rule_break_type(hierarchy, deconstructed_DNA, if_dict, result_dict, result_dict_type):
# Check if Variants in 'if_dict' found in deconstructed_DNA:
if_bool = False # True if Variant in 'deconstructed_DNA' found in 'if_dict'
for a in if_dict: # Attribute in 'if_dict'
for b in if_dict[a]: # Variant in if_dict[Attribute]
var_order_num = str(if_dict[a][b][1]) # Order number of 'b' (Variant)
dna_order_num = str(
deconstructed_DNA[if_dict[a][b][4]]) # Order Number of 'b's attribute in deconstructed_DNA
if var_order_num == dna_order_num: # If DNA selected Variants found inside IF list variants:
if_bool = True
break
for a in num_List2:
if str(deconstructed_DNA[getAttIndex(hierarchy, a)]) not in num_List2[a]:
violates_rule = True
return violates_rule
else:
continue
break
# Check if Variants in 'result_dict' found in deconstructed_DNA:
full_att_bool = False
result_bool = False # True if Variant in 'deconstructed_DNA' found in 'result_dict'
for a in result_dict: # Attribute in 'result_dict'
for b in result_dict[a]: # Variant in if_dict[Attribute]
var_order_num = str(result_dict[a][b][1]) # Order number of 'b' (Variant)
dna_order_num = str(
deconstructed_DNA[result_dict[a][b][4]]) # Order Number of 'b's attribute in deconstructed_DNA
if var_order_num == dna_order_num: # If DNA selected Variants found inside THEN list variants:
if list(result_dict[a].keys()) == list(hierarchy[a].keys()):
full_att_bool = True
result_bool = True
break
else:
continue
break
# Rule Bool return summary:
violates_rule = False
# If Variants in 'if_dict' found in deconstructed_DNA and Variants in 'result_dict' not found in deconstructed_DNA:
if if_bool and not result_bool:
violates_rule = True
elif if_bool and result_bool and result_dict_type == "NOT":
violates_rule = True
# If Variants in 'if_dict' not found in deconstructed_DNA, and 'result_dict' variants are found in deconstructed_DNA,
# and they are a part of a full Attribute in 'then_dict'
elif not if_bool and result_bool and full_att_bool:
violates_rule = True
# If Variants in 'if_dict' not found in deconstructed_DNA, but Variants in 'then_dict' are found in deconstructed_DNA,
# and don't make up a full Attribute:
# elif not if_bool and result_bool and not full_att_bool:
# violates_rule = False
return violates_rule, if_bool, result_bool, full_att_bool
violates_rule = False
return violates_rule
def create_dicts(hierarchy, rule_list_items, result_dict_type):
structure = {
"attribute1": {
"variant1": [
"name",
"order_number",
"rarity_number",
"attribute",
"attribute_index"
],
"variant2": [
"name",
"order_number",
"rarity_number",
"attribute",
"attribute_index"
]
},
"attribute2": {
"variant1": [
"name",
"order_number",
"rarity_number",
"attribute",
"attribute_index"
],
"variant2": [
"name",
"order_number",
"rarity_number",
"attribute",
"attribute_index"
]
}
}
# Main Function
def logicafyDNAsingle(hierarchy, singleDNA, logicFile):
items_returned = collections.defaultdict(dict)
for a in rule_list_items:
for b in hierarchy:
if a == b: # If 'a' is an Attribute, add all 'a' Variants to items_returned dict.
variant_list_of_a = list(hierarchy[a].keys())
variant_dict_of_a = {}
for c in variant_list_of_a:
variant_dict_of_a[c] = get_var_info(c, hierarchy)
items_returned[a] = variant_dict_of_a
if a in list(hierarchy[b].keys()): # If 'a' is a Variant, add all info about that variant to items_returned
items_returned[b][a] = get_var_info(a, hierarchy)
items_returned = dict(items_returned)
return dict(items_returned)
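# Illustrative example (names and the attribute index are assumptions): with "Blue_1_50" living
# under an "Eyes" attribute, create_dicts(hierarchy, ["Blue_1_50"], "IF") returns
# {"Eyes": {"Blue_1_50": ["Blue", "1", "50", "Eyes", 0]}} -- each Variant mapped to the
# [name, order_number, rarity_number, attribute, attribute_index] list from get_var_info().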
def logicafyDNAsingle(hierarchy, singleDNA, logicFile, enableRarity, enableMaterials):
deconstructed_DNA = singleDNA.split("-")
didReconstruct = True
originalDNA = str(singleDNA)
while didReconstruct:
didReconstruct = False
for rule in logicFile:
# Items from 'IF' key for a given rule
if_dict = create_dicts(hierarchy, logicFile[rule]["IF"], "IF")
items_List1 = isAttorVar(hierarchy, logicFile[rule]["Items-1"])
items_List2 = isAttorVar(hierarchy, logicFile[rule]["Items-2"])
num_List1 = items_to_num(items_List1)
num_List2 = items_to_num(items_List2)
result_dict_type = ""
if "THEN" in logicFile[rule]:
result_dict_type = "THEN"
if logicFile[rule]["Rule-Type"] == "Never With":
if never_with_Rule_Check(hierarchy, deconstructed_DNA, num_List1, num_List2):
if "NOT" in logicFile[rule]:
result_dict_type = "NOT"
rand_bool = bool(random.getrandbits(1))
result_dict = create_dicts(hierarchy, logicFile[rule][result_dict_type], result_dict_type)
if rand_bool:
deconstructed_DNA = rar_selectVar(hierarchy, items_List2, deconstructed_DNA)
# Change 'then_bool' to 'result_bool'
violates_rule, if_bool, then_bool, full_att_bool = get_rule_break_type(hierarchy, deconstructed_DNA,
if_dict, result_dict, result_dict_type)
if violates_rule:
# print(f"======={deconstructed_DNA} VIOLATES RULE======")
if not rand_bool:
deconstructed_DNA = rar_selectVar(hierarchy, items_List1, deconstructed_DNA)
deconstructed_DNA = apply_rules_to_dna(
hierarchy, deconstructed_DNA, if_dict, result_dict, result_dict_type, enableRarity
)
newDNA = reconstructDNA(deconstructed_DNA)
if newDNA != originalDNA:
originalDNA = str(newDNA)
didReconstruct = True
break
newDNA = reconstructDNA(deconstructed_DNA)
if newDNA != originalDNA:
originalDNA = str(newDNA)
didReconstruct = True
break
if logicFile[rule]["Rule-Type"] == "Only With":
if only_with_Rule_Check(hierarchy, deconstructed_DNA, num_List1, num_List2):
for b in num_List1:
if "0" in num_List1[b]: # If complete attribute
deconstructed_DNA[getAttIndex(hierarchy, b)] = "0"
if "0" not in num_List1[b]: # Not complete attribute, select from other variants with rarity:
deconstructed_DNA = rar_selectVar(hierarchy, items_List1, deconstructed_DNA)
newDNA = reconstructDNA(deconstructed_DNA)
if newDNA != originalDNA:
originalDNA = str(newDNA)
didReconstruct = True
break
if logicFile[rule]["Rule-Type"] == "Always With":
if always_with_Rule_Check(hierarchy, deconstructed_DNA, num_List1, num_List2):
deconstructed_DNA = rar_selectVar(hierarchy, items_List1, deconstructed_DNA)
newDNA = reconstructDNA(deconstructed_DNA)
if newDNA != originalDNA:
originalDNA = str(newDNA)
didReconstruct = True
break
return str(reconstructDNA(deconstructed_DNA))
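# Illustrative usage (the DNA string and rule contents are assumptions): given a logicFile with a
# "Never With" rule whose two item lists are both present in "1-1-2",
# logicafyDNAsingle(hierarchy, "1-1-2", logicFile, enableRarity, enableMaterials) keeps re-rolling
# the offending slots until no rule is violated, then returns the reconstructed DNA string.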

View file

@ -7,58 +7,36 @@ import bpy
import json
import random
from .Constants import bcolors, removeList, remove_file_by_extension, save_result
def select_material(materialList, variant, enableRarity):
def select_material(materialList):
"""Selects a material from a passed material list. """
material_List_Of_i = [] # List of Material names instead of order numbers
number_List_Of_i = []
rarity_List_Of_i = []
ifZeroBool = None
for material in materialList:
# Material Order Number comes from index in the Material List in materials.json for a given Variant.
# material_order_num = list(materialList.keys()).index(material)
material_List_Of_i.append(material)
material_order_num = material.split("_")[1]
number_List_Of_i.append(material_order_num)
material_rarity_percent = materialList[material]
material_rarity_percent = material.split("_")[1]
rarity_List_Of_i.append(float(material_rarity_percent))
print(f"MATERIAL_LIST_OF_I:{material_List_Of_i}")
print(f"RARITY_LIST_OF_I:{rarity_List_Of_i}")
for b in rarity_List_Of_i:
if b == 0:
for x in rarity_List_Of_i:
if x == 0:
ifZeroBool = True
elif b != 0:
break
elif x != 0:
ifZeroBool = False
if enableRarity:
try:
if ifZeroBool:
selected_material = random.choices(material_List_Of_i, k=1)
elif not ifZeroBool:
selected_material = random.choices(material_List_Of_i, weights=rarity_List_Of_i, k=1)
except IndexError:
raise IndexError(
f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Material List of the Variant collection '{variant}'. For more information on Blend_My_NFTs compatible scenes, "
f"see:\n{bcolors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
else:
try:
selected_material = random.choices(material_List_Of_i, k=1)
except IndexError:
raise IndexError(
f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Material List of the Variant collection '{variant}'. For more information on Blend_My_NFTs compatible scenes, "
f"see:\n{bcolors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
if ifZeroBool:
selected_material = random.choices(number_List_Of_i, k=1)
elif not ifZeroBool:
selected_material = random.choices(number_List_Of_i, weights=rarity_List_Of_i, k=1)
return selected_material[0], materialList
return selected_material[0]
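# Illustrative note (behaviour inferred from the code above; the exact key format in materials.json
# is not shown here): select_material() builds parallel lists of material order numbers and rarity
# weights, then picks one entry with random.choices() -- unweighted when a rarity of 0 is
# encountered, rarity-weighted otherwise -- and returns the selected order number as a string.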
def get_variant_att_index(variant, hierarchy):
variant_attribute = None
@ -91,7 +69,7 @@ def match_DNA_to_Variant(hierarchy, singleDNA):
dnaDictionary.update({x: k})
return dnaDictionary
def apply_materials(hierarchy, singleDNA, materialsFile, enableRarity):
def apply_materials(hierarchy, singleDNA, materialsFile):
"""
DNA with applied material example: "1-1:1-1" <Normal DNA>:<Selected Material for each Variant>
@ -107,22 +85,12 @@ def apply_materials(hierarchy, singleDNA, materialsFile, enableRarity):
complete = False
for b in materialsFile:
if singleDNADict[a] == b:
material_name, materialList, = select_material(materialsFile[b]['Material List'], b, enableRarity)
material_order_num = list(materialList.keys()).index(material_name) # Gets the Order Number of the Material
deconstructed_MaterialDNA[a] = str(material_order_num + 1)
mat = select_material(materialsFile[b]['Material List'])
deconstructed_MaterialDNA[a] = mat
complete = True
if not complete:
deconstructed_MaterialDNA[a] = "0"
# Make Attributes have the same materials:
# Order your Attributes alphabetically, then assign each Attribute a number, starting with 0. So Attribute 'A' = 0,
# Attribute 'B' = 1, 'C' = 2, 'D' = 3, etc. For each Attribute you want to share the same material, add its number to this list:
# synced_material_attributes = [1, 2]
#
# first_mat = deconstructed_MaterialDNA[synced_material_attributes[0]]
# for i in synced_material_attributes:
# deconstructed_MaterialDNA[i] = first_mat
material_DNA = ""
for a in deconstructed_MaterialDNA:
num = "-" + str(deconstructed_MaterialDNA[a])

View file

@ -4,9 +4,6 @@
import bpy
import random
from .Constants import bcolors, removeList, remove_file_by_extension
def createDNArarity(hierarchy):
"""
Sorts through DataDictionary and appropriately weights each variant based on their rarity percentage set in Blender
@ -17,6 +14,7 @@ def createDNArarity(hierarchy):
for i in hierarchy:
number_List_Of_i = []
rarity_List_Of_i = []
count = 0
ifZeroBool = None
for k in hierarchy[i]:
@ -26,24 +24,18 @@ def createDNArarity(hierarchy):
rarity = hierarchy[i][k]["rarity"]
rarity_List_Of_i.append(float(rarity))
count += 1
for x in rarity_List_Of_i:
if x == 0:
ifZeroBool = True
elif x != 0:
ifZeroBool = False
try:
if ifZeroBool:
variantByNum = random.choices(number_List_Of_i, k=1)
elif not ifZeroBool:
variantByNum = random.choices(number_List_Of_i, weights=rarity_List_Of_i, k=1)
except IndexError:
raise IndexError(
f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
f"An issue was found within the Attribute collection '{i}'. For more information on Blend_My_NFTs compatible scenes, "
f"see:\n{bcolors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
if ifZeroBool:
variantByNum = random.choices(number_List_Of_i, k=1)
elif not ifZeroBool:
variantByNum = random.choices(number_List_Of_i, weights=rarity_List_Of_i, k=1)
singleDNA += "-" + str(variantByNum[0])
singleDNA = ''.join(singleDNA.split('-', 1))
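# Illustrative example (variant numbers are made up): for a three-attribute hierarchy the loop
# above might accumulate "-2-1-3"; the final split/join strips the leading '-' so the DNA string
# comes out as "2-1-3".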