Fixing Logic and adding better error handling

__init__.py:
    Removed unnecessary variable passes
    Removed unnecessary imports

Checks.py:
    Added descriptions for all checks in Checks.py
    Created raise_Error_ScriptIgnore() check
    Created raise_Error_numBatches() check
    Created raise_Error_ZeroCombinations() check
    Created raise_Error_numBatchesGreaterThan() check
    Created raise_Warning_maxNFTs() check

DNA_Generator.py:
    Removed and refactored enableGeneration (deprecated material generation) code.
    Deprecated try/except statements and errors in favor of Checks.py
    Removed unnecessary variable passes
    Refactored # Messages
    Added loading animation

Exporter.py:
    Removed Material Handling section
    Refactored exporter console messages
    Added new loading animation for console when exporting images, animations or 3D models

loading_animation.py:
    Added loading animation file class

Logic.py:
    Fixed logic so that all rules are guaranteed to work
    Added "Always with" rule
pull/78/head
Torrin Leonard 2022-03-24 21:38:53 -04:00
rodzic c6a9224d67
commit e0528d0a65
6 zmienionych plików z 397 dodań i 260 usunięć

Wyświetl plik

@ -184,7 +184,7 @@ class createData(bpy.types.Operator):
Blend_My_NFTs_Output, batch_json_save_path, nftBatch_save_path = make_directories(save_path)
DNA_Generator.send_To_Record_JSON(nftName, maxNFTs, nftsPerBatch, save_path, enableRarity, enableLogic, logicFile, Blend_My_NFTs_Output)
DNA_Generator.send_To_Record_JSON(maxNFTs, nftsPerBatch, save_path, enableRarity, enableLogic, logicFile, Blend_My_NFTs_Output)
Batch_Sorter.makeBatches(nftName, maxNFTs, nftsPerBatch, save_path, batch_json_save_path)
self.report({'INFO'}, f"NFT Data created!")

Wyświetl plik

@ -9,11 +9,8 @@
import bpy
import os
import sys
import json
import importlib
from collections import Counter
from collections import defaultdict
from collections import Counter, defaultdict
class bcolors:
@ -26,9 +23,15 @@ class bcolors:
ERROR = '\033[91m' # RED
RESET = '\033[0m' # RESET COLOR
# Checks:
def check_Scene():
    """
    Checks if Blender file Scene follows the Blend_My_NFTs conventions. If not, raises error with all instances of
    violations.

    NOTE(review): not yet implemented — the body contains only this docstring,
    so calling it currently performs no checks. Confirm whether this is a
    placeholder for upcoming work.
    """
# Rarity Check
def check_Rarity(hierarchy, DNAList, save_path):
"""Checks rarity percentage of each Variant, then sends it to RarityData.json in NFT_Data folder."""
numNFTsGenerated = len(DNAList)
attributeNames = []
@ -102,6 +105,65 @@ def check_Duplicates(DNAList):
print(f"NFTRecord.json contains {duplicates} duplicate NFT DNA.")
if __name__ == '__main__':
    # NOTE(review): both calls below omit their required arguments
    # (check_Rarity expects hierarchy, DNAList and save_path;
    # check_Duplicates expects DNAList), so running this module directly
    # raises TypeError — confirm intended usage.
    check_Rarity()
    check_Duplicates()
# Raise Errors:
def raise_Error_ScriptIgnore():
    """Return the Script_Ignore collection, raising KeyError if it is absent from the scene."""
    try:
        return bpy.data.collections["Script_Ignore"]
    except KeyError:
        # Build the full guidance message first, then re-raise with it.
        message = (
            f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
            f"Script_Ignore collection not found in Blender scene. Please add the Script_Ignore "
            f"collection to Blender scene or ensure the spelling is exactly 'Script_Ignore'. For more information, "
            f"see:\n{bcolors.RESET}"
            f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{bcolors.RESET}"
        )
        raise KeyError(message)
def raise_Error_numBatches(maxNFTs, nftsPerBatch):
    """Return the number of batches, maxNFTs / nftsPerBatch.

    Raises:
        ZeroDivisionError: if nftsPerBatch is zero, with a descriptive
            console message instead of the bare division error.
    """
    try:
        return maxNFTs / nftsPerBatch
    except ZeroDivisionError:
        # The original message was truncated mid-sentence ("is less than the
        # number of"); state the actual cause of the failure instead.
        raise ZeroDivisionError(
            f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
            f"nftsPerBatch must be a positive integer greater than zero; "
            f"cannot split {maxNFTs} NFTs into batches of size {nftsPerBatch}.\n{bcolors.RESET}"
        )
def raise_Error_ZeroCombinations(combinations):
    """Raises ValueError when the number of possible NFT combinations is zero."""
    if combinations == 0:
        raise ValueError(
            f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
            f"The number of all possible combinations is ZERO. Please review your Blender scene and ensure it follows "
            f"the naming conventions and scene structure. For more information, "
            f"see:\n{bcolors.RESET}"
            f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{bcolors.RESET}"
        )
def raise_Error_numBatchesGreaterThan(numBatches):
    """Raises ValueError when the computed number of batches is less than 1.

    NOTE(review): the name says "GreaterThan" but the guard fires on
    numBatches < 1 — consider renaming for clarity.
    """
    if numBatches < 1:
        raise ValueError(
            f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
            f"The number of Batches is less than 1. Please review your Blender scene and ensure it follows "
            f"the naming conventions and scene structure. For more information, "
            f"see:\n{bcolors.RESET}"
            f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{bcolors.RESET}"
        )
# Raise Warnings:
def raise_Warning_maxNFTs(nftsPerBatch, maxNFTs):
    """Print a console warning when nftsPerBatch exceeds maxNFTs.

    The previous implementation attempted ``raise`` on a plain string, which
    itself fails with TypeError (exceptions must derive from BaseException).
    A warning should not abort DNA generation, so it is printed instead, and
    the message now states the actual relationship (per-batch count is
    GREATER than the collection size, not smaller).
    """
    if nftsPerBatch > maxNFTs:
        print(
            f"\n{bcolors.WARNING}Blend_My_NFTs Warning:\n"
            f"The number of NFTs Per Batch you set is greater than the NFT Collection Size you set.\n{bcolors.RESET}"
        )

Wyświetl plik

@ -16,38 +16,25 @@ importlib.reload(Rarity)
importlib.reload(Logic)
importlib.reload(Checks)
enableGeneration = False
colorList = []
class bcolors:
'''
"""
The colour of console messages.
'''
"""
OK = '\033[92m' # GREEN
WARNING = '\033[93m' # YELLOW
ERROR = '\033[91m' # RED
RESET = '\033[0m' # RESET COLOR
time_start = time.time()
def stripColorFromName(name):
    """Drop the final underscore-separated segment from name (e.g. 'Cube_Red' -> 'Cube')."""
    return name.rpartition("_")[0]
def returnData(nftName, maxNFTs, nftsPerBatch, save_path, enableRarity):
'''
def returnData(maxNFTs, nftsPerBatch):
"""
Generates important variables, dictionaries, and lists needed to be stored to catalog the NFTs.
:return: listAllCollections, attributeCollections, attributeCollections1, hierarchy, variantMetaData, possibleCombinations
'''
"""
coll = bpy.context.scene.collection
try:
scriptIgnore = bpy.data.collections["Script_Ignore"]
except:
print(f"{bcolors.ERROR} ERROR:\nScript_Ignore collection is not in .blend file scene. Please add the Script_Ignore collection to your "
f".blend file scene. For more information, read the README.md file.\n {bcolors.RESET}")
scriptIgnore = Checks.raise_Error_ScriptIgnore()
listAllCollInScene = []
listAllCollections = []
@ -60,33 +47,8 @@ def returnData(nftName, maxNFTs, nftsPerBatch, save_path, enableRarity):
for c in traverse_tree(coll):
listAllCollInScene.append(c)
def listSubIgnoreCollections():
def getParentSubCollections(collection):
yield collection
for child in collection.children:
yield from getParentSubCollections(child)
collList = []
for c in getParentSubCollections(scriptIgnore):
collList.append(c.name)
return collList
ignoreList = listSubIgnoreCollections()
for i in listAllCollInScene:
if enableGeneration:
if i.name in colorList:
for j in range(len(colorList[i.name])):
if i.name[-1].isdigit() and i.name not in ignoreList:
listAllCollections.append(i.name + "_" + str(j + 1))
elif j == 0:
listAllCollections.append(i.name)
elif i.name[-1].isdigit() and i.name not in ignoreList:
listAllCollections.append(i.name + "_0")
else:
listAllCollections.append(i.name)
else:
listAllCollections.append(i.name)
listAllCollections.append(i.name)
listAllCollections.remove(scriptIgnore.name)
@ -96,11 +58,11 @@ def returnData(nftName, maxNFTs, nftsPerBatch, save_path, enableRarity):
if "Master Collection" in listAllCollections:
listAllCollections.remove("Master Collection")
def allScriptIgnore(collection):
'''
def allScriptIgnore(scriptIgnore):
"""
Removes all collections, sub collections in Script_Ignore collection from listAllCollections.
'''
for coll in list(collection.children):
"""
for coll in list(scriptIgnore.children):
listAllCollections.remove(coll.name)
listColl = list(coll.children)
if len(listColl) > 0:
@ -145,7 +107,7 @@ def returnData(nftName, maxNFTs, nftsPerBatch, save_path, enableRarity):
def getOrder_rarity(i):
"""
Returns the "order", "rarity" and "color" (if enabled) of i attribute variant in a list
Returns the "order" and "rarity" (if enabled) of i attribute variant in a list
"""
x = re.sub(r'[a-zA-Z]', "", i)
a = x.split("_")
@ -155,30 +117,11 @@ def returnData(nftName, maxNFTs, nftsPerBatch, save_path, enableRarity):
name = getName(i)
orderRarity = getOrder_rarity(i)
if len(orderRarity) == 0:
print(f"{bcolors.ERROR} \nERROR: {bcolors.RESET}")
print(f"The collection {i} doesn't follow the naming conventions of attributes. Please move this \n"
"colleciton to Script_Ignore or review proper collection format in README.md")
return
number = orderRarity[0]
rarity = orderRarity[1]
elif len(orderRarity) > 0:
number = orderRarity[0]
if enableGeneration:
if count == 1 or count == 0:
previousAttribute = i.partition("_")[0]
count +=1
elif i.partition("_")[0] == previousAttribute:
count +=1
else:
count = 1
number = str(count)
rarity = orderRarity[1]
if enableGeneration and stripColorFromName(i) in colorList:
color = orderRarity[2]
else:
color = "0"
eachObject = {"name": name, "number": number, "rarity": rarity, "color": color}
allAttDataList[i] = eachObject
eachObject = {"name": name, "number": number, "rarity": rarity}
allAttDataList[i] = eachObject
return allAttDataList
variantMetaData = attributeData(attributeVariants)
@ -192,17 +135,7 @@ def returnData(nftName, maxNFTs, nftsPerBatch, save_path, enableRarity):
colParLong = list(bpy.data.collections[str(i)].children)
colParShort = {}
for x in colParLong:
if enableGeneration:
"""
Append colors to blender name for PNG generator and NFTRecord.json to create the correct list
"""
if x.name in colorList:
for j in range(len(colorList[x.name])):
colParShort[x.name + "_" + str(j+1)] = None
else:
colParShort[x.name + "_0"] = None
else:
colParShort[x.name] = None
colParShort[x.name] = None
hierarchy[i] = colParShort
for a in hierarchy:
@ -233,24 +166,14 @@ def returnData(nftName, maxNFTs, nftsPerBatch, save_path, enableRarity):
for i in hierarchyByNum:
combinations = combinations*i
try:
numBatches = combinations/nftsPerBatch
# Checks:
numBatches = Checks.raise_Error_numBatches(maxNFTs, nftsPerBatch)
except:
print(f"{bcolors.ERROR} ERROR:\nnftsPerBatch in config.py needs to be a positive integer. {bcolors.RESET}")
Checks.raise_Error_ZeroCombinations(combinations)
if combinations == 0:
print(bcolors.ERROR + "\nERROR:" + bcolors.RESET)
print("The number of all possible combinations is equal to 0. Please review your collection hierarchy"
"and ensure it is formatted correctly. Please review README.md for more information. \nHere is the "
"hierarchy of all collections the DNA_Generator gathered from your .blend file, excluding those in "
f"Script_Ignore: {hierarchy}")
Checks.raise_Error_numBatchesGreaterThan(numBatches)
if numBatches < 1:
print(f"{bcolors.ERROR} ERROR: {bcolors.RESET}")
print("The number of NFTs Per Batch (nftsPerBatch variable in config.py) is to high. There are a total of "
f" {combinations} possible NFT combinations and you've requested {nftsPerBatch} NFTs per batch. "
f"Lower the number of NFTs per batch in config.py or increase the number of attributes and/or variants in your .blend file.")
Checks.raise_Error_numBatchesGreaterThan(numBatches)
return combinations
@ -258,29 +181,12 @@ def returnData(nftName, maxNFTs, nftsPerBatch, save_path, enableRarity):
return listAllCollections, attributeCollections, attributeCollections1, hierarchy, possibleCombinations
def generateNFT_DNA(nftName, maxNFTs, nftsPerBatch, save_path, logicFile, enableRarity, enableLogic):
def generateNFT_DNA(maxNFTs, nftsPerBatch, logicFile, enableRarity, enableLogic):
"""
Returns batchDataDictionary containing the number of NFT combinations, hierarchy, and the DNAList.
"""
listAllCollections, attributeCollections, attributeCollections1, hierarchy, possibleCombinations = returnData(nftName, maxNFTs, nftsPerBatch, save_path, enableRarity)
# Messages:
print(f"NFT Combinations: {possibleCombinations}\n")
print(f"Generating {maxNFTs} combinations of DNA.\n")
if nftsPerBatch > maxNFTs:
print(bcolors.WARNING + "\nWARNING:" + bcolors.RESET)
print(
f"The Max num of NFTs you chose is smaller than the NFTs Per Batch you set. Only {maxNFTs} were added to 1 batch")
if not enableRarity and not enableLogic:
print(f"{bcolors.OK}DNA will be determined randomly, no special properties applied. {bcolors.RESET}")
if enableRarity:
print(f"{bcolors.OK}Rarity is ON. Weights listed in .blend will be taken into account.{bcolors.RESET}")
if enableLogic:
print(f"{bcolors.OK}Logic is ON. Rules listed in {logicFile} will be taken into account.{bcolors.RESET}")
listAllCollections, attributeCollections, attributeCollections1, hierarchy, possibleCombinations = returnData(maxNFTs, nftsPerBatch)
# DNA random, Rarity and Logic methods:
DataDictionary = {}
@ -355,7 +261,7 @@ def generateNFT_DNA(nftName, maxNFTs, nftsPerBatch, save_path, logicFile, enable
return DataDictionary, possibleCombinations
def send_To_Record_JSON(nftName, maxNFTs, nftsPerBatch, save_path, enableRarity, enableLogic, logicFile, Blend_My_NFTs_Output):
def send_To_Record_JSON(maxNFTs, nftsPerBatch, save_path, enableRarity, enableLogic, logicFile, Blend_My_NFTs_Output):
"""
Creates NFTRecord.json file and sends "batchDataDictionary" to it. NFTRecord.json is a permanent record of all DNA
you've generated with all attribute variants. If you add new variants or attributes to your .blend file, other scripts
@ -363,22 +269,77 @@ def send_To_Record_JSON(nftName, maxNFTs, nftsPerBatch, save_path, enableRarity,
repeate DNA.
"""
DataDictionary, possibleCombinations = generateNFT_DNA(nftName, maxNFTs, nftsPerBatch, save_path, logicFile, enableRarity, enableLogic)
# Messages:
print(
f"\n========================================\n"
f"Creating NFT Data. Generating {maxNFTs} NFT DNA.\n"
)
NFTRecord_save_path = os.path.join(Blend_My_NFTs_Output, "NFTRecord.json")
if not enableRarity and not enableLogic:
print(f"{bcolors.OK}NFT DNA will be determined randomly, no special properties or parameters are applied.\n{bcolors.RESET}")
Checks.check_Rarity(DataDictionary["hierarchy"], DataDictionary["DNAList"], os.path.join(save_path, "Blend_My_NFTs Output/NFT_Data"))
Checks.check_Duplicates(DataDictionary["DNAList"])
if enableRarity:
print(f"{bcolors.OK}Rarity is ON. Weights listed in .blend scene will be taken into account.\n{bcolors.RESET}")
try:
ledger = json.dumps(DataDictionary, indent=1, ensure_ascii=True)
with open(NFTRecord_save_path, 'w') as outfile:
outfile.write(ledger + '\n')
print(f"{bcolors.OK}{len(DataDictionary['DNAList'])} NFT DNA saved to {NFTRecord_save_path}\n"
f"NFT DNA Successfully created. {bcolors.RESET}")
if enableLogic:
print(f"{bcolors.OK}Logic is ON. Rules listed in {logicFile} will be taken into account.\n{bcolors.RESET}")
except:
print(f"{bcolors.ERROR} ERROR:\nNFT DNA not sent to {NFTRecord_save_path}\n {bcolors.RESET}")
time_start = time.time()
def create_nft_data():
DataDictionary, possibleCombinations = generateNFT_DNA(maxNFTs, nftsPerBatch, logicFile, enableRarity, enableLogic)
NFTRecord_save_path = os.path.join(Blend_My_NFTs_Output, "NFTRecord.json")
# Checks:
Checks.raise_Warning_maxNFTs(nftsPerBatch, maxNFTs)
Checks.check_Rarity(DataDictionary["hierarchy"], DataDictionary["DNAList"], os.path.join(save_path, "Blend_My_NFTs Output/NFT_Data"))
Checks.check_Duplicates(DataDictionary["DNAList"])
try:
ledger = json.dumps(DataDictionary, indent=1, ensure_ascii=True)
with open(NFTRecord_save_path, 'w') as outfile:
outfile.write(ledger + '\n')
print(
f"\n{bcolors.OK}Blend_My_NFTs Success:\n"
f"{len(DataDictionary['DNAList'])} NFT DNA saved to {NFTRecord_save_path}. NFT DNA Successfully created.\n{bcolors.RESET}")
except:
raise (
f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows "
f"the naming conventions and scene structure. For more information, "
f"see:\n{bcolors.RESET}"
f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
)
return True
# Loading Animations:
bar = [
" [= ]",
" [ = ]",
" [ = ]",
" [ = ]",
" [ = ]",
" [ =]",
" [ = ]",
" [ = ]",
" [ = ]",
" [ = ]",
]
i = 0
while not create_nft_data():
print(bar[i % len(bar)], end="\r")
time.sleep(.2)
i += 1
time_end = time.time()
print(
f"Created NFT Data in {time_end - time_start}s.\n"
)
if __name__ == '__main__':

Wyświetl plik

@ -7,6 +7,10 @@ import os
import time
import json
import sys
import itertools
import threading
from .loading_animation import Loader
enableGeneration = False
colorList = []
@ -110,103 +114,127 @@ def render_and_save_NFTs(nftName, maxNFTs, batchToGenerate, batch_json_save_path
modelFolder = os.path.join(batchFolder, "Models")
metaDataFolder = os.path.join(batchFolder, "BMNFT_metaData")
# Material handling:
if enableGeneration:
for c in dnaDictionary:
collection = dnaDictionary[c]
if stripColorFromName(collection) in colorList:
colorVal = int(collection.rsplit("_", 1)[1])-1
collection = stripColorFromName(collection)
bpy.data.collections[collection].hide_render = False
bpy.data.collections[collection].hide_viewport = False
if generationType == 'color':
for activeObject in bpy.data.collections[collection].all_objects:
mat = bpy.data.materials.new("PKHG")
mat.diffuse_color = colorList[collection][colorVal]
activeObject.active_material = mat
if generationType == 'material':
for activeObject in bpy.data.collections[collection].all_objects:
activeObject.material_slots[0].material = bpy.data.materials[colorList[collection][colorVal]]
else:
collection = stripColorFromName(collection)
bpy.data.collections[collection].hide_render = False
bpy.data.collections[collection].hide_viewport = False
# Generation/Rendering:
if enableImages:
print(f"{bcolors.OK}Rendering Image{bcolors.RESET}")
print(f"{bcolors.OK}---Image---{bcolors.RESET}")
if not os.path.exists(imageFolder):
os.makedirs(imageFolder)
image_render_time_start = time.time()
bpy.context.scene.render.filepath = imagePath
bpy.context.scene.render.image_settings.file_format = imageFileFormat
bpy.ops.render.render(write_still=True)
def render_image():
if not os.path.exists(imageFolder):
os.makedirs(imageFolder)
bpy.context.scene.render.filepath = imagePath
bpy.context.scene.render.image_settings.file_format = imageFileFormat
bpy.ops.render.render(write_still=True)
# Loading Animation:
loading = Loader(f'Rendering Image {x}/{NFTs_in_Batch}...', '').start()
render_image()
loading.stop()
image_render_time_end = time.time()
print(
f"{bcolors.OK}Rendered image in {image_render_time_end - image_render_time_start}s.\n{bcolors.RESET}"
)
if enableAnimations:
print(f"{bcolors.OK}Rendering Animation{bcolors.RESET}")
if not os.path.exists(animationFolder):
os.makedirs(animationFolder)
print(f"{bcolors.OK}---Animation---{bcolors.RESET}")
bpy.context.scene.render.filepath = animationPath
animation_render_time_start = time.time()
if animationFileFormat == 'MP4':
bpy.context.scene.render.image_settings.file_format = "FFMPEG"
def render_animation():
if not os.path.exists(animationFolder):
os.makedirs(animationFolder)
bpy.context.scene.render.ffmpeg.format = 'MPEG4'
bpy.context.scene.render.ffmpeg.codec = 'H264'
bpy.ops.render.render(animation=True)
bpy.context.scene.render.filepath = animationPath
else:
bpy.context.scene.render.image_settings.file_format = animationFileFormat
bpy.ops.render.render(animation=True)
if animationFileFormat == 'MP4':
bpy.context.scene.render.image_settings.file_format = "FFMPEG"
bpy.context.scene.render.ffmpeg.format = 'MPEG4'
bpy.context.scene.render.ffmpeg.codec = 'H264'
bpy.ops.render.render(animation=True)
else:
bpy.context.scene.render.image_settings.file_format = animationFileFormat
bpy.ops.render.render(animation=True)
# Loading Animation:
loading = Loader(f'Rendering Animation {x}/{NFTs_in_Batch}...', '').start()
render_animation()
loading.stop()
animation_render_time_end = time.time()
print(
f"{bcolors.OK}Rendered animation in {animation_render_time_end - animation_render_time_start}s.\n{bcolors.RESET}"
)
if enableModelsBlender:
print(f"{bcolors.OK}Generating 3D Model{bcolors.RESET}")
if not os.path.exists(modelFolder):
os.makedirs(modelFolder)
print(f"{bcolors.OK}---3D Model---{bcolors.RESET}")
for i in dnaDictionary:
coll = dnaDictionary[i]
if coll != '0':
for obj in bpy.data.collections[coll].all_objects:
obj.select_set(True)
model_generation_time_start = time.time()
for obj in bpy.data.collections['Script_Ignore'].all_objects:
obj.select_set(True)
def generate_models():
if not os.path.exists(modelFolder):
os.makedirs(modelFolder)
if modelFileFormat == 'GLB':
bpy.ops.export_scene.gltf(filepath=f"{modelPath}.glb",
check_existing=True,
export_format='GLB',
use_selection=True)
if modelFileFormat == 'GLTF_SEPARATE':
bpy.ops.export_scene.gltf(filepath=f"{modelPath}",
check_existing=True,
export_format='GLTF_SEPARATE',
use_selection=True)
if modelFileFormat == 'GLTF_EMBEDDED':
bpy.ops.export_scene.gltf(filepath=f"{modelPath}.gltf",
check_existing=True,
export_format='GLTF_EMBEDDED',
use_selection=True)
elif modelFileFormat == 'FBX':
bpy.ops.export_scene.fbx(filepath=f"{modelPath}.fbx",
check_existing=True,
use_selection=True)
elif modelFileFormat == 'OBJ':
bpy.ops.export_scene.obj(filepath=f"{modelPath}.obj",
check_existing=True,
use_selection=True,)
elif modelFileFormat == 'X3D':
bpy.ops.export_scene.x3d(filepath=f"{modelPath}.x3d",
check_existing=True,
use_selection=True)
elif modelFileFormat == 'STL':
bpy.ops.export_mesh.stl(filepath=f"{modelPath}.stl",
check_existing=True,
use_selection=True)
elif modelFileFormat == 'VOX':
bpy.ops.export_vox.some_data(filepath=f"{modelPath}.vox")
for i in dnaDictionary:
coll = dnaDictionary[i]
if coll != '0':
for obj in bpy.data.collections[coll].all_objects:
obj.select_set(True)
for obj in bpy.data.collections['Script_Ignore'].all_objects:
obj.select_set(True)
if modelFileFormat == 'GLB':
bpy.ops.export_scene.gltf(filepath=f"{modelPath}.glb",
check_existing=True,
export_format='GLB',
use_selection=True)
if modelFileFormat == 'GLTF_SEPARATE':
bpy.ops.export_scene.gltf(filepath=f"{modelPath}",
check_existing=True,
export_format='GLTF_SEPARATE',
use_selection=True)
if modelFileFormat == 'GLTF_EMBEDDED':
bpy.ops.export_scene.gltf(filepath=f"{modelPath}.gltf",
check_existing=True,
export_format='GLTF_EMBEDDED',
use_selection=True)
elif modelFileFormat == 'FBX':
bpy.ops.export_scene.fbx(filepath=f"{modelPath}.fbx",
check_existing=True,
use_selection=True)
elif modelFileFormat == 'OBJ':
bpy.ops.export_scene.obj(filepath=f"{modelPath}.obj",
check_existing=True,
use_selection=True, )
elif modelFileFormat == 'X3D':
bpy.ops.export_scene.x3d(filepath=f"{modelPath}.x3d",
check_existing=True,
use_selection=True)
elif modelFileFormat == 'STL':
bpy.ops.export_mesh.stl(filepath=f"{modelPath}.stl",
check_existing=True,
use_selection=True)
elif modelFileFormat == 'VOX':
bpy.ops.export_vox.some_data(filepath=f"{modelPath}.vox")
# Loading Animation:
loading = Loader(f'Rendering Animation {x}/{NFTs_in_Batch}...', '').start()
generate_models()
loading.stop()
model_generation_time_end = time.time()
print(
f"{bcolors.OK}Generated model in {model_generation_time_end - model_generation_time_start}s.\n{bcolors.RESET}"
)
if not os.path.exists(metaDataFolder):
os.makedirs(metaDataFolder)
@ -222,19 +250,18 @@ def render_and_save_NFTs(nftName, maxNFTs, batchToGenerate, batch_json_save_path
with open(os.path.join(metaDataFolder, "Data_" + name + ".json"), 'w') as outfile:
outfile.write(jsonMetaData + '\n')
print("Completed {} render in ".format(name) + "%.4f seconds" % (time.time() - time_start_2))
print(f"Completed {name} render in {time.time() - time_start_2}s")
x += 1
for a in BatchDNAList:
for i in hierarchy:
for j in hierarchy[i]:
if enableGeneration:
j = stripColorFromName(j)
bpy.data.collections[j].hide_render = False
bpy.data.collections[j].hide_viewport = False
for i in hierarchy:
for j in hierarchy[i]:
if enableGeneration:
j = stripColorFromName(j)
bpy.data.collections[j].hide_render = False
bpy.data.collections[j].hide_viewport = False
print(f"\nAll NFTs successfully generated and sent to {nftBatch_save_path}")
print("Completed all renders in Batch{}.json in ".format(batchToGenerate) + "%.4f seconds" % (time.time() - time_start_1) + "\n")
print(f"\nAll NFTs successfully generated and sent to {nftBatch_save_path}"
f"\nCompleted all renders in Batch{batchToGenerate}.json in {time.time() - time_start_1}s\n")
if __name__ == '__main__':

Wyświetl plik

@ -40,7 +40,7 @@ def getVarNum(variant):
num = variant.split("_")[1]
return num
def items_to_num(hierarchy, items_List):
def items_to_num(items_List):
num_List = {}
for i in items_List:
variant_num_list = []
@ -92,8 +92,14 @@ def rar_selectVar(hierarchy, items_List, deconstructed_DNA):
return deconstructed_DNA
def reconstructDNA(deconstructedDNA):
    """Join deconstructed DNA segments back into a single dash-separated DNA string.

    Replaces the manual prepend-then-strip-first-dash construction with a
    single str.join, which produces the identical result for any sequence.
    """
    return "-".join(str(segment) for segment in deconstructedDNA)
# Rule Check:
# Rule Checks:
def never_with_Rule_Check(hierarchy, deconstructed_DNA, num_List1, num_List2):
"""Returns True if singleDNA violates Never with Rule stated in Logic.json."""
violates_rule = None
@ -108,7 +114,7 @@ def never_with_Rule_Check(hierarchy, deconstructed_DNA, num_List1, num_List2):
return violates_rule
def only_with_Rule_Check(hierarchy, deconstructed_DNA, num_List1, num_List2):
"""Returns true if singleDNA violates Only with RUle stated in Logic.json."""
"""Returns True if singleDNA violates Only with Rule stated in Logic.json."""
violates_rule = None
for a in num_List1:
for b in num_List2:
@ -121,45 +127,68 @@ def only_with_Rule_Check(hierarchy, deconstructed_DNA, num_List1, num_List2):
violates_rule = False
return violates_rule
def always_with_Rule_Check(hierarchy, deconstructed_DNA, num_List1, num_List2):
    """Returns True if singleDNA violates Always with Rule stated in Logic.json.

    NOTE(review): both branches return on the very first iteration, so only
    the first key of num_List2 is ever examined — confirm whether the rule
    should instead check every entry before deciding.
    """
    violates_rule = None
    for a in num_List2:
        # Violation if the DNA's variant for attribute `a` is not one of the
        # required variant numbers listed for that attribute.
        if str(deconstructed_DNA[getAttIndex(hierarchy, a)]) not in num_List2[a]:
            violates_rule = True
            return violates_rule
        else:
            violates_rule = False
            return violates_rule
# Main Function
def logicafyDNAsingle(hierarchy, singleDNA, logicFile):
logicFile = json.load(open(logicFile))
deconstructed_DNA = singleDNA.split("-")
for rule in logicFile:
items_List1 = isAttorVar(hierarchy, logicFile[rule]["Items-1"])
items_List2 = isAttorVar(hierarchy, logicFile[rule]["Items-2"])
num_List1 = items_to_num(hierarchy, items_List1)
num_List2 = items_to_num(hierarchy, items_List2)
didReconstruct = True
originalDNA = str(singleDNA)
resultDNA = str(singleDNA)
if logicFile[rule]["Rule-Type"] == "Never with":
if never_with_Rule_Check(hierarchy, deconstructed_DNA, num_List1, num_List2):
while didReconstruct:
didReconstruct = False
for rule in logicFile:
items_List1 = isAttorVar(hierarchy, logicFile[rule]["Items-1"])
items_List2 = isAttorVar(hierarchy, logicFile[rule]["Items-2"])
num_List1 = items_to_num(items_List1)
num_List2 = items_to_num(items_List2)
rand_bool = bool(random.getrandbits(1))
if logicFile[rule]["Rule-Type"] == "Never with":
if never_with_Rule_Check(hierarchy, deconstructed_DNA, num_List1, num_List2):
if rand_bool:
deconstructed_DNA = rar_selectVar(hierarchy, items_List2, deconstructed_DNA)
rand_bool = bool(random.getrandbits(1))
if not rand_bool:
deconstructed_DNA = rar_selectVar(hierarchy, items_List1, deconstructed_DNA)
if rand_bool:
deconstructed_DNA = rar_selectVar(hierarchy, items_List2, deconstructed_DNA)
if logicFile[rule]["Rule-Type"] == "Only with":
if only_with_Rule_Check(hierarchy, deconstructed_DNA, num_List1, num_List2):
for b in num_List1:
if "0" in num_List1[b]: # If complete attribute
deconstructed_DNA[getAttIndex(hierarchy, b)] = "0"
if "0" not in num_List1[b]: # Not complete attribute, select from other variants with rarity:
if not rand_bool:
deconstructed_DNA = rar_selectVar(hierarchy, items_List1, deconstructed_DNA)
reconstructed_DNA = ""
for a in deconstructed_DNA:
num = "-" + str(a)
reconstructed_DNA += num
singleDNA = (''.join(reconstructed_DNA.split('-', 1)))
if logicFile[rule]["Rule-Type"] == "Only with":
if only_with_Rule_Check(hierarchy, deconstructed_DNA, num_List1, num_List2):
for b in num_List1:
if "0" in num_List1[b]: # If complete attribute
deconstructed_DNA[getAttIndex(hierarchy, b)] = "0"
return singleDNA
if "0" not in num_List1[b]: # Not complete attribute, select from other variants with rarity:
deconstructed_DNA = rar_selectVar(hierarchy, items_List1, deconstructed_DNA)
if logicFile[rule]["Rule-Type"] == "Always with":
if always_with_Rule_Check(hierarchy, deconstructed_DNA, num_List1, num_List2):
deconstructed_DNA = rar_selectVar(hierarchy, items_List1, deconstructed_DNA)
newDNA = reconstructDNA(deconstructed_DNA)
if newDNA != originalDNA:
originalDNA = str(newDNA)
didReconstruct = True
resultDNA = str(newDNA)
break
return resultDNA
if __name__ == '__main__':

Wyświetl plik

@ -0,0 +1,58 @@
from itertools import cycle
from shutil import get_terminal_size
from threading import Thread
from time import sleep
class Loader:
    """Console loading-animation spinner driven by a daemon thread."""

    def __init__(self, desc="Loading...", end="Done!", timeout=0.1):
        """
        A loader-like context manager

        Args:
            desc (str, optional): The loader's description. Defaults to "Loading...".
            end (str, optional): Final print. Defaults to "Done!".
            timeout (float, optional): Sleep time between prints. Defaults to 0.1.
        """
        self.desc = desc
        self.end = end
        self.timeout = timeout
        # Daemon thread so a forgotten stop() never blocks interpreter exit.
        self._thread = Thread(target=self._animate, daemon=True)
        # Spinner frames. The original list held only empty strings (the
        # glyphs were lost to an encoding error), so nothing animated;
        # restored as portable ASCII frames.
        self.steps = ["|", "/", "-", "\\"]
        self.done = False

    def start(self):
        """Start the animation thread; returns self so calls can be chained."""
        self._thread.start()
        return self

    def _animate(self):
        # Cycle frames until stop() flips self.done; \r keeps it on one line.
        for c in cycle(self.steps):
            if self.done:
                break
            print(f"\r{self.desc} {c}", flush=True, end="")
            sleep(self.timeout)

    def __enter__(self):
        self.start()

    def stop(self):
        """Stop the animation, blank the spinner line, and print the end message."""
        self.done = True
        cols = get_terminal_size((80, 20)).columns
        print("\r" + " " * cols, end="", flush=True)
        print(f"\r{self.end}", flush=True)

    def __exit__(self, exc_type, exc_value, tb):
        # handle exceptions with those variables ^
        self.stop()
if __name__ == "__main__":
with Loader("Loading with context manager..."):
for i in range(10):
sleep(0.25)
loader = Loader("Loading with object...", "That was fast!", 0.05).start()
for i in range(10):
sleep(0.25)
loader.stop()