Mirror of https://github.com/torrinworx/Blend_My_NFTs
Merge pull request #142 from torrinworx/Debug-Mode-and-Pep8-Formatting
Debug mode and pep8 formatting (pull/144/head, v4.5.1)
commit 5bf19488d2
@@ -11,6 +11,7 @@ from bpy.types import (Operator,
                       PropertyGroup,
                       UIList)


# ======== Operators ======== #
class CUSTOM_OT_custom_metadata_fields_actions(Operator):
    """Move items up and down, add and remove"""
@@ -103,6 +104,7 @@ class CUSTOM_UL_custom_metadata_fields_items(UIList):
    def invoke(self, context, event):
        pass


# ======== Property Collection ======== #
class CUSTOM_custom_metadata_fields_objectCollection(PropertyGroup):
    # name: StringProperty() -> Instantiated by default
@@ -11,6 +11,7 @@ from bpy.types import (Operator,
                       PropertyGroup,
                       UIList)


# ======== Operators ======== #
class CUSTOM_OT_logic_actions(Operator):
    """Move items up and down, add and remove"""
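Both hunks above touch the same boilerplate: a bpy Operator, a UIList, and a PropertyGroup row type. A minimal sketch of that standard Blender add-on pattern (class names and idnames here are hypothetical, not taken from this diff):

    import bpy
    from bpy.types import Operator, PropertyGroup, UIList

    class CUSTOM_OT_example_actions(Operator):
        """Move items up and down, add and remove"""
        bl_idname = "custom.example_actions"
        bl_label = "List Actions"

        def execute(self, context):
            return {'FINISHED'}

    class CUSTOM_UL_example_items(UIList):
        def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
            layout.label(text=item.name)

    class CUSTOM_example_objectCollection(PropertyGroup):
        pass  # name: StringProperty() is instantiated by default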
745 __init__.py
Diff file is too large to display.
219 main/Checks.py
@@ -1,219 +0,0 @@
# Purpose:
# The purpose of this file is to check the NFTRecord.json for duplicate NFT DNA and report any found in the console.
# It also checks the percentage each variant is chosen in the NFTRecord, then compares it with its rarity percentage
# set in the .blend file.

# This file is provided for transparency. The accuracy of the rarity values you set in your .blend file as outlined in
# the README.md file is dependent on maxNFTs and the maximum number of combinations of your NFT collection.

import bpy
import os
import json
from collections import Counter, defaultdict

from . import DNA_Generator, get_combinations
from .Constants import bcolors, removeList, remove_file_by_extension


# Checks:
def check_Scene():  # Not complete
    """
    Checks if the Blender file Scene follows the Blend_My_NFTs conventions. If not, raises an error listing all
    instances of violations.
    """

    script_ignore_exists = None  # True if Script_Ignore collection exists in Blender scene
    attribute_naming_conventions = None  # True if all attributes in Blender scene follow BMNFTs naming conventions
    variant_naming_conventions = None  # True if all variants in Blender scene follow BMNFTs naming conventions
    object_placing_conventions = None  # True if all objects are within either Script_Ignore or a variant collection

    # script_ignore_exists:
    try:
        scriptIgnoreCollection = bpy.data.collections["Script_Ignore"]
        script_ignore_exists = True
    except KeyError:
        raise TypeError(
            f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
            f"Add a Script_Ignore collection to your Blender scene and ensure the name is exactly 'Script_Ignore'. "
            f"For more information, see:"
            f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{bcolors.RESET}"
        )

    hierarchy = DNA_Generator.get_hierarchy()
    collections = bpy.context.scene.collection

    # attribute_naming_conventions


def check_Rarity(hierarchy, DNAListFormatted, save_path):
    """Checks the rarity percentage of each Variant, then writes it to RarityData.json in the NFT_Data folder."""

    DNAList = []
    for i in DNAListFormatted:
        DNAList.append(list(i.keys())[0])

    numNFTsGenerated = len(DNAList)

    numDict = defaultdict(list)

    for i in DNAList:
        dnaSplitList = i.split("-")

        for j, k in zip(dnaSplitList, hierarchy.keys()):
            numDict[k].append(j)

    numDict = dict(numDict)

    for i in numDict:
        count = dict(Counter(numDict[i]))
        numDict[i] = count

    fullNumName = {}

    for i in hierarchy:
        fullNumName[i] = {}
        for j in hierarchy[i]:
            variantNum = hierarchy[i][j]["number"]

            fullNumName[i][variantNum] = j

    completeData = {}

    for i, j in zip(fullNumName, numDict):
        x = {}

        for k in fullNumName[i]:

            for l in numDict[j]:
                if l == k:
                    name = fullNumName[i][k]
                    num = numDict[j][l]
                    x[name] = [(str(round(((num / numNFTsGenerated) * 100), 2)) + "%"), str(num)]

        completeData[i] = x

    print(
        f"\n{bcolors.OK}\n"
        f"Rarity Checker is active. These are the percentages for each variant per attribute you set in your .blend file:"
        f"\n{bcolors.RESET}"
    )

    for i in completeData:
        print(i + ":")
        for j in completeData[i]:
            print("  " + j + ": " + completeData[i][j][0] + "  Occurrences: " + completeData[i][j][1])

    jsonMetaData = json.dumps(completeData, indent=1, ensure_ascii=True)

    with open(os.path.join(save_path, "RarityData.json"), 'w') as outfile:
        outfile.write(jsonMetaData + '\n')
    path = os.path.join(save_path, "RarityData.json")
    print(bcolors.OK + f"Rarity Data has been saved to {path}." + bcolors.RESET)


def check_Duplicates(DNAListFormatted):
    """Checks if there are duplicates in DNAList before NFTRecord.json is written."""
    DNAList = []
    for i in DNAListFormatted:
        DNAList.append(list(i.keys())[0])

    duplicates = 0
    seen = set()

    for x in DNAList:
        if x in seen:
            print(x)
            duplicates += 1
        seen.add(x)

    print(f"\nNFTRecord.json contains {duplicates} duplicate NFT DNA.")


def check_FailedBatches(batch_json_save_path):
    fail_state = False
    failed_batch = None
    failed_dna = None
    failed_dna_index = None

    if os.path.isdir(batch_json_save_path):
        batch_folders = remove_file_by_extension(os.listdir(batch_json_save_path))

        for i in batch_folders:
            batch = json.load(open(os.path.join(batch_json_save_path, i)))
            NFTs_in_Batch = batch["NFTs_in_Batch"]
            if "Generation Save" in batch:
                dna_generated = batch["Generation Save"][-1]["DNA Generated"]
                if dna_generated is not None and dna_generated < NFTs_in_Batch:
                    fail_state = True
                    failed_batch = int(i.removeprefix("Batch").removesuffix(".json"))
                    failed_dna = dna_generated

    return fail_state, failed_batch, failed_dna, failed_dna_index


# Raise Errors:
def raise_Error_numBatches(maxNFTs, nftsPerBatch):
    """Computes the number of Batches; raises an error if nftsPerBatch is zero."""

    try:
        numBatches = maxNFTs / nftsPerBatch
        return numBatches
    except ZeroDivisionError:
        raise ZeroDivisionError(
            f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
            f"The number of NFTs per Batch must be greater than ZERO. "
            f"Please review your Blender scene and ensure it follows "
            f"the naming conventions and scene structure. For more information, "
            f"see:\n{bcolors.RESET}"
            f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{bcolors.RESET}"
        )


def raise_Error_ZeroCombinations():
    """Checks if the number of possible combinations is zero and, if so, raises an error."""
    if get_combinations.get_combinations() == 0:
        raise ValueError(
            f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
            f"The number of all possible combinations is ZERO. Please review your Blender scene and ensure it follows "
            f"the naming conventions and scene structure. For more information, "
            f"see:\n{bcolors.RESET}"
            f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{bcolors.RESET}"
        )


def raise_Error_numBatchesGreaterThan(numBatches):
    if numBatches < 1:
        raise ValueError(
            f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
            f"The number of Batches is less than 1. Please review your Blender scene and ensure it follows "
            f"the naming conventions and scene structure. For more information, "
            f"see:\n{bcolors.RESET}"
            f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{bcolors.RESET}"
        )


# Raise Warnings:
def raise_Warning_maxNFTs(nftsPerBatch, collectionSize):
    """
    Raises a warning if nftsPerBatch is greater than collectionSize.
    """

    if nftsPerBatch > collectionSize:
        raise ValueError(
            f"\n{bcolors.WARNING}Blend_My_NFTs Warning:\n"
            f"The number of NFTs Per Batch you set is greater than the NFT Collection Size you set.\n{bcolors.RESET}"
        )


def raise_Warning_collectionSize(DNAList, collectionSize):
    """
    Prints a warning if BMNFTs cannot generate the requested number of NFTs from a given collectionSize.
    """

    if len(DNAList) < collectionSize:
        print(f"\n{bcolors.WARNING} \nWARNING: \n"
              f"Blend_My_NFTs cannot generate {collectionSize} NFTs."
              f" Only {len(DNAList)} NFT DNA were generated."

              f"\nThis might be for a number of reasons:"
              f"\n  a) Rarity is preventing combinations from being generated (see https://github.com/torrinworx/Blend_My_NFTs#notes-on-rarity-and-weighted-variants).\n"
              f"\n  b) Logic is preventing combinations from being generated (see https://github.com/torrinworx/Blend_My_NFTs#logic).\n"
              f"\n  c) The number of possible combinations of your NFT collection is too low. Add more Variants or Attributes to increase the recommended collection size.\n"
              f"\n{bcolors.RESET}")
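For reference, a minimal standalone sketch of the tally that check_Rarity performs, using toy DNA strings and hypothetical attribute names (not taken from this diff):

    from collections import Counter, defaultdict

    hierarchy_keys = ["Background", "Body"]      # hypothetical attribute order
    dna_list = ["1-2", "1-1", "2-2", "1-2"]      # four generated DNA strings

    tally = defaultdict(list)
    for dna in dna_list:
        # Pair each dash-separated variant number with its attribute
        for attribute, variant_num in zip(hierarchy_keys, dna.split("-")):
            tally[attribute].append(variant_num)

    for attribute, nums in tally.items():
        for variant_num, count in Counter(nums).items():
            pct = round(count / len(dna_list) * 100, 2)
            print(f"{attribute} variant {variant_num}: {pct}% ({count} occurrences)")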
51 main/Constants.py

@@ -1,51 +0,0 @@
# Purpose:
# This file is for storing or updating constant values that may need to be changed depending on system requirements and
# different use cases.
import os
import json
import platform

removeList = [".gitignore", ".DS_Store", "desktop.ini", ".ini"]


def remove_file_by_extension(dirlist):
    """
    Checks if a given directory list contains any of the files or file extensions listed above; if so, removes them
    from the list and returns a clean dir list. These files interfere with BMNFTs operations and should be removed
    whenever dealing with directories.
    """

    if isinstance(dirlist, str):
        dirlist = [dirlist]  # converts a single string path to a list if a dir was passed as a string

    return_dirs = []
    for directory in dirlist:
        if not str(os.path.split(directory)[1]) in removeList:
            return_dirs.append(directory)

    return return_dirs


class bcolors:
    """
    The colour of console messages.
    """

    OK = '\033[92m'  # GREEN
    WARNING = '\033[93m'  # YELLOW
    ERROR = '\033[91m'  # RED
    RESET = '\033[0m'  # RESET COLOR


def save_result(result):
    """
    Saves a JSON result to a json file at the specified path.
    """
    file_name = "log.json"
    if platform.system() == "Linux" or platform.system() == "Darwin":
        path = os.path.join(os.path.join(os.path.expanduser('~')), 'Desktop', file_name)

    if platform.system() == "Windows":
        path = os.path.join(os.environ["HOMEPATH"], "Desktop", file_name)

    data = json.dumps(result, indent=1, ensure_ascii=True)
    with open(path, 'w') as outfile:
        outfile.write(data + '\n')
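A quick usage sketch of the filter remove_file_by_extension applies, assuming a typical os.listdir() result (file names here are illustrative):

    import os

    removeList = [".gitignore", ".DS_Store", "desktop.ini", ".ini"]
    dirlist = ["Batch1.json", "Batch2.json", ".DS_Store", "desktop.ini"]

    # Equivalent to remove_file_by_extension(dirlist):
    clean = [d for d in dirlist if os.path.split(d)[1] not in removeList]
    print(clean)  # ['Batch1.json', 'Batch2.json'] -- OS junk files are dropped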
368 main/DNA_Generator.py

@@ -1,368 +0,0 @@
# Purpose:
# This file generates NFT DNA based on a .blend file scene structure and exports NFTRecord.json.

import bpy
import os
import re
import copy
import time
import json
import random
from functools import partial
from .loading_animation import Loader
from . import Rarity, Logic, Checks, Material_Generator
from .Constants import bcolors, removeList, remove_file_by_extension


def get_hierarchy():
    """
    Returns the hierarchy of a given Blender scene.
    """

    coll = bpy.context.scene.collection

    scriptIgnoreCollection = bpy.data.collections["Script_Ignore"]

    listAllCollInScene = []
    listAllCollections = []

    def traverse_tree(t):
        yield t
        for child in t.children:
            yield from traverse_tree(child)

    for c in traverse_tree(coll):
        listAllCollInScene.append(c)

    for i in listAllCollInScene:
        listAllCollections.append(i.name)

    listAllCollections.remove(scriptIgnoreCollection.name)

    if "Scene Collection" in listAllCollections:
        listAllCollections.remove("Scene Collection")

    if "Master Collection" in listAllCollections:
        listAllCollections.remove("Master Collection")

    def allScriptIgnore(scriptIgnoreCollection):
        # Removes all collections and sub-collections in the Script_Ignore collection from listAllCollections.

        for coll in list(scriptIgnoreCollection.children):
            listAllCollections.remove(coll.name)
            listColl = list(coll.children)
            if len(listColl) > 0:
                allScriptIgnore(coll)

    allScriptIgnore(scriptIgnoreCollection)
    listAllCollections.sort()

    exclude = ["_"]  # Excluding characters that identify a Variant
    attributeCollections = copy.deepcopy(listAllCollections)

    def filter_num():
        """
        This function removes items from 'attributeCollections' if they include values from the 'exclude' variable.
        It removes child collections from the parent collections in the 'listAllCollections' list.
        """
        for x in attributeCollections:
            if any(a in x for a in exclude):
                attributeCollections.remove(x)

    for i in range(len(listAllCollections)):
        filter_num()

    attributeVariants = [x for x in listAllCollections if x not in attributeCollections]
    attributeCollections1 = copy.deepcopy(attributeCollections)

    def attributeData(attributeVariants):
        """
        Creates a dictionary of each attribute.
        """
        allAttDataList = {}
        for i in attributeVariants:
            # Check if the name follows the naming conventions:
            if int(i.count("_")) > 2 and int(i.split("_")[1]) > 0:
                raise Exception(
                    f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
                    f"There is a naming issue with the following Attribute/Variant: '{i}'\n"
                    f"Review the naming convention of Attribute and Variant collections here:\n{bcolors.RESET}"
                    f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
                )

            try:
                number = i.split("_")[1]
                name = i.split("_")[0]
                rarity = i.split("_")[2]
            except IndexError:
                raise Exception(
                    f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
                    f"There is a naming issue with the following Attribute/Variant: '{i}'\n"
                    f"Review the naming convention of Attribute and Variant collections here:\n{bcolors.RESET}"
                    f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
                )

            allAttDataList[i] = {"name": name, "number": number, "rarity": rarity}
        return allAttDataList

    variantMetaData = attributeData(attributeVariants)

    hierarchy = {}
    for i in attributeCollections1:
        colParLong = list(bpy.data.collections[str(i)].children)
        colParShort = {}
        for x in colParLong:
            colParShort[x.name] = None
        hierarchy[i] = colParShort

    for a in hierarchy:
        for b in hierarchy[a]:
            for x in variantMetaData:
                if str(x) == str(b):
                    (hierarchy[a])[b] = variantMetaData[x]

    return hierarchy


def generateNFT_DNA(collectionSize, enableRarity, enableLogic, logicFile, enableMaterials, materialsFile):
    """
    Returns batchDataDictionary containing the number of NFT combinations, the hierarchy, and the DNAList.
    """

    hierarchy = get_hierarchy()

    # DNA random, Rarity and Logic methods:
    DataDictionary = {}

    def createDNArandom(hierarchy):
        """Creates a single DNA randomly without Rarity or Logic."""
        dnaStr = ""
        dnaStrList = []
        listOptionVariant = []

        for i in hierarchy:
            numChild = len(hierarchy[i])
            possibleNums = list(range(1, numChild + 1))
            listOptionVariant.append(possibleNums)

        for i in listOptionVariant:
            randomVariantNum = random.choices(i, k=1)
            str1 = ''.join(str(e) for e in randomVariantNum)
            dnaStrList.append(str1)

        for i in dnaStrList:
            num = "-" + str(i)
            dnaStr += num

        dna = ''.join(dnaStr.split('-', 1))

        return str(dna)

    def singleCompleteDNA():
        """
        Applies Rarity and Logic to a single DNA created by createDNArandom() if Rarity or Logic is specified.
        """

        singleDNA = ""
        # Comments for debugging random, rarity, logic, and materials.
        if not enableRarity:
            singleDNA = createDNArandom(hierarchy)
        # print("============")
        # print(f"Original DNA: {singleDNA}")
        if enableRarity:
            singleDNA = Rarity.createDNArarity(hierarchy)
        # print(f"Rarity DNA: {singleDNA}")

        if enableLogic:
            singleDNA = Logic.logicafyDNAsingle(hierarchy, singleDNA, logicFile, enableRarity, enableMaterials)
        # print(f"Logic DNA: {singleDNA}")

        if enableMaterials:
            singleDNA = Material_Generator.apply_materials(hierarchy, singleDNA, materialsFile, enableRarity)
        # print(f"Materials DNA: {singleDNA}")

        # print("============\n")

        return singleDNA

    def create_DNAList():
        """Creates the DNAList. Loops through createDNArandom() and applies Rarity and Logic while checking that all DNA are unique."""
        DNASetReturn = set()

        for i in range(collectionSize):
            dnaPushToList = partial(singleCompleteDNA)

            DNASetReturn |= {''.join([dnaPushToList()]) for _ in range(collectionSize - len(DNASetReturn))}

        DNAListUnformatted = list(DNASetReturn)

        DNAListFormatted = []
        DNA_Counter = 1
        for i in DNAListUnformatted:
            DNAListFormatted.append({
                i: {
                    "Complete": False,
                    "Order_Num": DNA_Counter
                }
            })

            DNA_Counter += 1

        return DNAListFormatted

    DNAList = create_DNAList()

    # Messages:

    Checks.raise_Warning_collectionSize(DNAList, collectionSize)

    # Data stored in batchDataDictionary:
    DataDictionary["numNFTsGenerated"] = len(DNAList)
    DataDictionary["hierarchy"] = hierarchy
    DataDictionary["DNAList"] = DNAList

    return DataDictionary


def makeBatches(collectionSize, nftsPerBatch, save_path, batch_json_save_path):
    """
    Sorts through all the batches and outputs a given number of batches depending on collectionSize and nftsPerBatch.
    These files are then saved as Batch#.json files to batch_json_save_path.
    """

    # Clears the Batch Data folder of Batches:
    batchList = os.listdir(batch_json_save_path)
    if batchList:
        for i in batchList:
            batch = os.path.join(batch_json_save_path, i)
            if os.path.exists(batch):
                os.remove(
                    os.path.join(batch_json_save_path, i)
                )

    Blend_My_NFTs_Output = os.path.join(save_path, "Blend_My_NFTs Output", "NFT_Data")
    NFTRecord_save_path = os.path.join(Blend_My_NFTs_Output, "NFTRecord.json")
    DataDictionary = json.load(open(NFTRecord_save_path))

    numNFTsGenerated = DataDictionary["numNFTsGenerated"]
    hierarchy = DataDictionary["hierarchy"]
    DNAList = DataDictionary["DNAList"]

    numBatches = collectionSize // nftsPerBatch
    remainder_dna = collectionSize % nftsPerBatch
    if remainder_dna > 0:
        numBatches += 1

    print(f"To generate batches of {nftsPerBatch} DNA sequences per batch, with a total of {numNFTsGenerated}"
          f" possible NFT DNA sequences, the number of batches generated will be {numBatches}")

    batches_dna_list = []

    for i in range(numBatches):
        BatchDNAList = []
        if i != numBatches - 1:
            BatchDNAList = list(DNAList[0:nftsPerBatch])
            batches_dna_list.append(BatchDNAList)

            DNAList = [x for x in DNAList if x not in BatchDNAList]
        else:
            BatchDNAList = DNAList

        batchDictionary = {
            "NFTs_in_Batch": int(len(BatchDNAList)),
            "hierarchy": hierarchy,
            "BatchDNAList": BatchDNAList
        }

        batchDictionary = json.dumps(batchDictionary, indent=1, ensure_ascii=True)

        with open(os.path.join(batch_json_save_path, f"Batch{i + 1}.json"), "w") as outfile:
            outfile.write(batchDictionary)


def send_To_Record_JSON(collectionSize, nftsPerBatch, save_path, enableRarity, enableLogic, logicFile, enableMaterials,
                        materialsFile, Blend_My_NFTs_Output, batch_json_save_path):
    """
    Creates the NFTRecord.json file and sends "batchDataDictionary" to it. NFTRecord.json is a permanent record of all
    DNA you've generated with all attribute variants. If you add new variants or attributes to your .blend file, other
    scripts need to reference this .json file to generate new DNA and make note of the new attributes and variants to
    prevent repeated DNA.
    """

    # Checking that the Scene is compatible with BMNFTs:
    Checks.check_Scene()

    # Messages:
    print(
        f"\n========================================\n"
        f"Creating NFT Data. Generating {collectionSize} NFT DNA.\n"
    )

    if not enableRarity and not enableLogic:
        print(
            f"{bcolors.OK}NFT DNA will be determined randomly, no special properties or parameters are applied.\n{bcolors.RESET}")

    if enableRarity:
        print(f"{bcolors.OK}Rarity is ON. Weights listed in the .blend scene will be taken into account.\n{bcolors.RESET}")

    if enableLogic:
        print(f"{bcolors.OK}Logic is ON. {len(list(logicFile.keys()))} rules detected and applied.\n{bcolors.RESET}")

    time_start = time.time()

    def create_nft_data():
        try:
            DataDictionary = generateNFT_DNA(collectionSize, enableRarity, enableLogic, logicFile, enableMaterials,
                                             materialsFile)
            NFTRecord_save_path = os.path.join(Blend_My_NFTs_Output, "NFTRecord.json")

            # Checks:

            Checks.raise_Warning_maxNFTs(nftsPerBatch, collectionSize)
            Checks.check_Duplicates(DataDictionary["DNAList"])
            Checks.raise_Error_ZeroCombinations()

            if enableRarity:
                Checks.check_Rarity(DataDictionary["hierarchy"], DataDictionary["DNAList"],
                                    os.path.join(save_path, "Blend_My_NFTs Output/NFT_Data"))

        except FileNotFoundError:
            raise FileNotFoundError(
                f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
                f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows "
                f"the naming conventions and scene structure. For more information, "
                f"see:\n{bcolors.RESET}"
                f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
            )
        finally:
            loading.stop()

        try:
            ledger = json.dumps(DataDictionary, indent=1, ensure_ascii=True)
            with open(NFTRecord_save_path, 'w') as outfile:
                outfile.write(ledger + '\n')

            print(
                f"\n{bcolors.OK}Blend_My_NFTs Success:\n"
                f"{len(DataDictionary['DNAList'])} NFT DNA saved to {NFTRecord_save_path}. NFT DNA Successfully created.\n{bcolors.RESET}")

        except Exception:
            raise Exception(
                f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
                f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows "
                f"the naming conventions and scene structure. For more information, "
                f"see:\n{bcolors.RESET}"
                f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
            )

    # Loading Animation:
    loading = Loader(f'Creating NFT DNA...', '').start()
    create_nft_data()
    makeBatches(collectionSize, nftsPerBatch, save_path, batch_json_save_path)
    loading.stop()

    time_end = time.time()

    print(
        f"{bcolors.OK}Created and saved NFT DNA in {time_end - time_start}s.\n{bcolors.RESET}"
    )
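A minimal sketch of the two mechanisms in the file above, with a toy hierarchy (collection names here are hypothetical): DNA strings are dash-joined 1-based variant numbers, and makeBatches splits the list using ceiling division:

    import random

    hierarchy = {"Background": {"Blue_1_10": {}, "Red_2_10": {}},
                 "Body": {"Cube_1_50": {}, "Sphere_2_50": {}}}

    # One random DNA: pick a variant number per attribute, join with "-"
    dna = "-".join(str(random.randint(1, len(variants))) for variants in hierarchy.values())
    print(dna)  # e.g. "2-1"

    # Batch count used by makeBatches: ceiling division of collectionSize by nftsPerBatch
    collectionSize, nftsPerBatch = 100, 30
    numBatches = collectionSize // nftsPerBatch + (collectionSize % nftsPerBatch > 0)
    print(numBatches)  # 4 -> Batch1.json .. Batch4.json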
580 main/Exporter.py
@@ -1,580 +0,0 @@
# Purpose:
# This file takes a given Batch created by DNA_Generator.py and tells Blender to render the image or export a 3D model
# to the NFT_Output folder.

import bpy
import os
import ssl
import time
import json
import smtplib
import datetime
import platform
from .loading_animation import Loader
from .Constants import bcolors, removeList, remove_file_by_extension
from .Metadata import createCardanoMetadata, createSolanaMetaData, createErc721MetaData


# Save info
def save_batch(batch, file_name):
    saved_batch = json.dumps(batch, indent=1, ensure_ascii=True)

    with open(os.path.join(file_name), 'w') as outfile:
        outfile.write(saved_batch + '\n')


def save_generation_state(input):
    """Saves the date and time of generation start, and the generation types: Images, Animations, 3D Models, and the file types for each."""
    file_name = os.path.join(input.batch_json_save_path, "Batch{}.json".format(input.batchToGenerate))
    batch = json.load(open(file_name))

    CURRENT_TIME = datetime.datetime.now().strftime("%H:%M:%S")
    CURRENT_DATE = datetime.datetime.now().strftime("%d/%m/%Y")
    LOCAL_TIMEZONE = str(datetime.datetime.now(datetime.timezone.utc))

    if "Generation Save" in batch:
        batch_save_number = int(batch[f"Generation Save"].index(batch[f"Generation Save"][-1]))
    else:
        batch_save_number = 0

    batch["Generation Save"] = list()
    batch["Generation Save"].append({
        "Batch Save Number": batch_save_number + 1,
        "DNA Generated": None,
        "Generation Start Date and Time": [CURRENT_TIME, CURRENT_DATE, LOCAL_TIMEZONE],
        "Render_Settings": {
            "nftName": input.nftName,
            "save_path": input.save_path,
            "nftsPerBatch": input.nftsPerBatch,
            "batchToGenerate": input.batchToGenerate,
            "collectionSize": input.collectionSize,

            "Blend_My_NFTs_Output": input.Blend_My_NFTs_Output,
            "batch_json_save_path": input.batch_json_save_path,
            "nftBatch_save_path": input.nftBatch_save_path,

            "enableImages": input.enableImages,
            "imageFileFormat": input.imageFileFormat,

            "enableAnimations": input.enableAnimations,
            "animationFileFormat": input.animationFileFormat,

            "enableModelsBlender": input.enableModelsBlender,
            "modelFileFormat": input.modelFileFormat,

            "enableCustomFields": input.enableCustomFields,

            "cardanoMetaDataBool": input.cardanoMetaDataBool,
            "solanaMetaDataBool": input.solanaMetaDataBool,
            "erc721MetaData": input.erc721MetaData,

            "cardano_description": input.cardano_description,
            "solana_description": input.solana_description,
            "erc721_description": input.erc721_description,

            "enableMaterials": input.enableMaterials,
            "materialsFile": input.materialsFile,

            "enableLogic": input.enableLogic,
            "enable_Logic_Json": input.enable_Logic_Json,
            "logicFile": input.logicFile,

            "enableRarity": input.enableRarity,

            "enableAutoShutdown": input.enableAutoShutdown,

            "specify_timeBool": input.specify_timeBool,
            "hours": input.hours,
            "minutes": input.minutes,

            "emailNotificationBool": input.emailNotificationBool,
            "sender_from": input.sender_from,
            "email_password": input.email_password,
            "receiver_to": input.receiver_to,

            "custom_Fields": input.custom_Fields,
        },
    })

    save_batch(batch, file_name)


def save_completed(full_single_dna, a, x, batch_json_save_path, batchToGenerate):
    """Saves the progress of rendering to the Batch#.json file."""

    file_name = os.path.join(batch_json_save_path, "Batch{}.json".format(batchToGenerate))
    batch = json.load(open(file_name))
    index = batch["BatchDNAList"].index(a)
    batch["BatchDNAList"][index][full_single_dna]["Complete"] = True
    batch["Generation Save"][-1]["DNA Generated"] = x

    save_batch(batch, file_name)


# Exporter functions:
def getBatchData(batchToGenerate, batch_json_save_path):
    """
    Retrieves a given batch's data, determined by renderBatch in config.py.
    """

    file_name = os.path.join(batch_json_save_path, "Batch{}.json".format(batchToGenerate))
    batch = json.load(open(file_name))

    NFTs_in_Batch = batch["NFTs_in_Batch"]
    hierarchy = batch["hierarchy"]
    BatchDNAList = batch["BatchDNAList"]

    return NFTs_in_Batch, hierarchy, BatchDNAList


def render_and_save_NFTs(input):
    """
    Renders the NFT DNA in a Batch#.json, where # is renderBatch in config.py. Turns off the viewport camera and
    the render camera for all items in the hierarchy.
    """

    time_start_1 = time.time()

    # If a failed Batch is detected and the user is resuming its generation:
    if input.fail_state:
        print(f"{bcolors.ERROR}\nResuming Failed Batch {input.failed_batch}\n{bcolors.RESET}")
        NFTs_in_Batch, hierarchy, BatchDNAList = getBatchData(input.failed_batch, input.batch_json_save_path)
        for a in range(input.failed_dna):
            del BatchDNAList[0]
        x = input.failed_dna + 1

    # If the user is generating the normal way:
    else:
        print(f"\nGenerating Batch {input.batchToGenerate}\n")
        NFTs_in_Batch, hierarchy, BatchDNAList = getBatchData(input.batchToGenerate, input.batch_json_save_path)
        save_generation_state(input)
        x = 1

    if input.enableMaterials:
        materialsFile = json.load(open(input.materialsFile))

    for a in BatchDNAList:
        full_single_dna = list(a.keys())[0]
        Order_Num = a[full_single_dna]['Order_Num']

        # Material handling:
        if input.enableMaterials:
            single_dna, material_dna = full_single_dna.split(':')
        else:
            single_dna = full_single_dna

        def match_DNA_to_Variant(single_dna):
            """
            Matches each DNA number separated by "-" to its attribute, then its variant.
            """

            listAttributes = list(hierarchy.keys())
            listDnaDeconstructed = single_dna.split('-')
            dnaDictionary = {}

            for i, j in zip(listAttributes, listDnaDeconstructed):
                dnaDictionary[i] = j

            for x in dnaDictionary:
                for k in hierarchy[x]:
                    kNum = hierarchy[x][k]["number"]
                    if kNum == dnaDictionary[x]:
                        dnaDictionary.update({x: k})
            return dnaDictionary

        def match_materialDNA_to_Material(single_dna, material_dna, materialsFile):
            """
            Matches the Material DNA to its selected Materials, unless a 0 is present, meaning no material for that variant was selected.
            """
            listAttributes = list(hierarchy.keys())
            listDnaDeconstructed = single_dna.split('-')
            listMaterialDNADeconstructed = material_dna.split('-')

            full_dna_dict = {}

            for attribute, variant, material in zip(listAttributes, listDnaDeconstructed, listMaterialDNADeconstructed):

                for var in hierarchy[attribute]:
                    if hierarchy[attribute][var]['number'] == variant:
                        variant = var

                if material != '0':  # If material is not empty
                    for variant_m in materialsFile:
                        if variant == variant_m:
                            # Getting the Material's name from the Material's index in the Material List
                            materials_list = list(materialsFile[variant_m]["Material List"].keys())

                            material = materials_list[int(material) - 1]  # Subtract 1 because '0' means empty mat
                            break

                full_dna_dict[variant] = material

            return full_dna_dict

        metadataMaterialDict = {}

        if input.enableMaterials:
            materialdnaDictionary = match_materialDNA_to_Material(single_dna, material_dna, materialsFile)

            for var_mat in list(materialdnaDictionary.keys()):
                if materialdnaDictionary[var_mat] != '0':
                    if not materialsFile[var_mat]['Variant Objects']:
                        # If the objects to apply the material to are not specified, apply it to all objects in the
                        # Variant collection.
                        metadataMaterialDict[var_mat] = materialdnaDictionary[var_mat]

                        for obj in bpy.data.collections[var_mat].all_objects:
                            selected_object = bpy.data.objects.get(obj.name)
                            selected_object.active_material = bpy.data.materials[materialdnaDictionary[var_mat]]

                    if materialsFile[var_mat]['Variant Objects']:
                        # If the objects to apply the material to are specified, apply the material only to the
                        # objects specified within the Variant collection.
                        metadataMaterialDict[var_mat] = materialdnaDictionary[var_mat]

                        for obj in materialsFile[var_mat]['Variant Objects']:
                            selected_object = bpy.data.objects.get(obj)
                            selected_object.active_material = bpy.data.materials[materialdnaDictionary[var_mat]]

        # Turn off the render camera and viewport camera for all collections in the hierarchy
        for i in hierarchy:
            for j in hierarchy[i]:
                try:
                    bpy.data.collections[j].hide_render = True
                    bpy.data.collections[j].hide_viewport = True
                except KeyError:
                    raise TypeError(
                        f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
                        f"The Collection '{j}' appears to be missing or has been renamed. If you made any changes to "
                        f"your .blend file scene, ensure you re-create your NFT Data so Blend_My_NFTs can read your scene. "
                        f"For more information see:{bcolors.RESET}"
                        f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
                    )

        dnaDictionary = match_DNA_to_Variant(single_dna)
        name = input.nftName + "_" + str(Order_Num)

        # Change a Text Object in the Scene to match the DNA string:
        # Variables that can be used: full_single_dna, name, Order_Num
        # ob = bpy.data.objects['Text']  # Object name
        # ob.data.body = str(f"DNA: {full_single_dna}")  # Set text of Text Object ob

        print(f"\n{bcolors.OK}|--- Generating NFT {x}/{NFTs_in_Batch}: {name} ---|{bcolors.RESET}")
        print(f"DNA attribute list:\n{dnaDictionary}\nDNA Code:{single_dna}")

        for c in dnaDictionary:
            collection = dnaDictionary[c]
            if collection != '0':
                bpy.data.collections[collection].hide_render = False
                bpy.data.collections[collection].hide_viewport = False

        time_start_2 = time.time()

        # Main paths for batch subfolders:
        batchFolder = os.path.join(input.nftBatch_save_path, "Batch" + str(input.batchToGenerate))

        imageFolder = os.path.join(batchFolder, "Images")
        animationFolder = os.path.join(batchFolder, "Animations")
        modelFolder = os.path.join(batchFolder, "Models")
        BMNFT_metaData_Folder = os.path.join(batchFolder, "BMNFT_metadata")

        imagePath = os.path.join(imageFolder, name)
        animationPath = os.path.join(animationFolder, name)
        modelPath = os.path.join(modelFolder, name)

        cardanoMetadataPath = os.path.join(batchFolder, "Cardano_metadata")
        solanaMetadataPath = os.path.join(batchFolder, "Solana_metadata")
        erc721MetadataPath = os.path.join(batchFolder, "Erc721_metadata")

        def check_failed_exists(file_path):
            # Deletes a file if a fail state is detected and the file being re-generated already exists. Prevents
            # animations from corrupting.

            if input.fail_state:
                if os.path.exists(file_path):
                    os.remove(file_path)

        # Generation/Rendering:
        if input.enableImages:

            print(f"{bcolors.OK}---Image---{bcolors.RESET}")

            image_render_time_start = time.time()

            check_failed_exists(imagePath)

            def render_image():
                if not os.path.exists(imageFolder):
                    os.makedirs(imageFolder)

                bpy.context.scene.render.filepath = imagePath
                bpy.context.scene.render.image_settings.file_format = input.imageFileFormat
                bpy.ops.render.render(write_still=True)

            # Loading Animation:
            loading = Loader(f'Rendering Image {x}/{NFTs_in_Batch}...', '').start()
            render_image()
            loading.stop()

            image_render_time_end = time.time()

            print(
                f"{bcolors.OK}Rendered image in {image_render_time_end - image_render_time_start}s.\n{bcolors.RESET}"
            )

        if input.enableAnimations:
            print(f"{bcolors.OK}---Animation---{bcolors.RESET}")

            animation_render_time_start = time.time()

            check_failed_exists(animationPath)

            def render_animation():
                if not os.path.exists(animationFolder):
                    os.makedirs(animationFolder)

                if input.animationFileFormat == "MP4":
                    bpy.context.scene.render.filepath = animationPath
                    bpy.context.scene.render.image_settings.file_format = "FFMPEG"

                    bpy.context.scene.render.ffmpeg.format = 'MPEG4'
                    bpy.context.scene.render.ffmpeg.codec = 'H264'
                    bpy.ops.render.render(animation=True)

                elif input.animationFileFormat == 'PNG':
                    if not os.path.exists(animationPath):
                        os.makedirs(animationPath)

                    bpy.context.scene.render.filepath = os.path.join(animationPath, name)
                    bpy.context.scene.render.image_settings.file_format = input.animationFileFormat
                    bpy.ops.render.render(animation=True)

                elif input.animationFileFormat == 'TIFF':
                    if not os.path.exists(animationPath):
                        os.makedirs(animationPath)

                    bpy.context.scene.render.filepath = os.path.join(animationPath, name)
                    bpy.context.scene.render.image_settings.file_format = input.animationFileFormat
                    bpy.ops.render.render(animation=True)

                else:
                    bpy.context.scene.render.filepath = animationPath
                    bpy.context.scene.render.image_settings.file_format = input.animationFileFormat
                    bpy.ops.render.render(animation=True)

            # Loading Animation:
            loading = Loader(f'Rendering Animation {x}/{NFTs_in_Batch}...', '').start()
            render_animation()
            loading.stop()

            animation_render_time_end = time.time()

            print(
                f"{bcolors.OK}Rendered animation in {animation_render_time_end - animation_render_time_start}s.\n{bcolors.RESET}"
            )

        if input.enableModelsBlender:
            print(f"{bcolors.OK}---3D Model---{bcolors.RESET}")

            model_generation_time_start = time.time()

            def generate_models():
                if not os.path.exists(modelFolder):
                    os.makedirs(modelFolder)

                for i in dnaDictionary:
                    coll = dnaDictionary[i]
                    if coll != '0':
                        for obj in bpy.data.collections[coll].all_objects:
                            obj.select_set(True)

                for obj in bpy.data.collections['Script_Ignore'].all_objects:
                    obj.select_set(True)

                # Remove objects from 3D model export:
                # remove_objects: list = [
                # ]
                #
                # for obj in bpy.data.objects:
                #     if obj.name in remove_objects:
                #         obj.select_set(False)

                if input.modelFileFormat == 'GLB':
                    check_failed_exists(f"{modelPath}.glb")
                    bpy.ops.export_scene.gltf(filepath=f"{modelPath}.glb",
                                              check_existing=True,
                                              export_format='GLB',
                                              export_keep_originals=True,
                                              use_selection=True)
                elif input.modelFileFormat == 'GLTF_SEPARATE':
                    check_failed_exists(f"{modelPath}.gltf")
                    check_failed_exists(f"{modelPath}.bin")
                    bpy.ops.export_scene.gltf(filepath=f"{modelPath}",
                                              check_existing=True,
                                              export_format='GLTF_SEPARATE',
                                              export_keep_originals=True,
                                              use_selection=True)
                elif input.modelFileFormat == 'GLTF_EMBEDDED':
                    check_failed_exists(f"{modelPath}.gltf")
                    bpy.ops.export_scene.gltf(filepath=f"{modelPath}.gltf",
                                              check_existing=True,
                                              export_format='GLTF_EMBEDDED',
                                              export_keep_originals=True,
                                              use_selection=True)
                elif input.modelFileFormat == 'FBX':
                    check_failed_exists(f"{modelPath}.fbx")
                    bpy.ops.export_scene.fbx(filepath=f"{modelPath}.fbx",
                                             check_existing=True,
                                             use_selection=True)
                elif input.modelFileFormat == 'OBJ':
                    check_failed_exists(f"{modelPath}.obj")
                    bpy.ops.export_scene.obj(filepath=f"{modelPath}.obj",
                                             check_existing=True,
                                             use_selection=True)
                elif input.modelFileFormat == 'X3D':
                    check_failed_exists(f"{modelPath}.x3d")
                    bpy.ops.export_scene.x3d(filepath=f"{modelPath}.x3d",
                                             check_existing=True,
                                             use_selection=True)
                elif input.modelFileFormat == 'STL':
                    check_failed_exists(f"{modelPath}.stl")
                    bpy.ops.export_mesh.stl(filepath=f"{modelPath}.stl",
                                            check_existing=True,
                                            use_selection=True)
                elif input.modelFileFormat == 'VOX':
                    check_failed_exists(f"{modelPath}.vox")
                    bpy.ops.export_vox.some_data(filepath=f"{modelPath}.vox")

            # Loading Animation:
            loading = Loader(f'Generating 3D model {x}/{NFTs_in_Batch}...', '').start()
            generate_models()
            loading.stop()

            model_generation_time_end = time.time()

            print(
                f"{bcolors.OK}Generated 3D model in {model_generation_time_end - model_generation_time_start}s.\n{bcolors.RESET}"
            )

        # Generating Metadata:
        if input.cardanoMetaDataBool:
            if not os.path.exists(cardanoMetadataPath):
                os.makedirs(cardanoMetadataPath)
            createCardanoMetadata(name, Order_Num, full_single_dna, dnaDictionary, metadataMaterialDict,
                                  input.custom_Fields,
                                  input.enableCustomFields, input.cardano_description, cardanoMetadataPath)

        if input.solanaMetaDataBool:
            if not os.path.exists(solanaMetadataPath):
                os.makedirs(solanaMetadataPath)
            createSolanaMetaData(name, Order_Num, full_single_dna, dnaDictionary, metadataMaterialDict,
                                 input.custom_Fields,
                                 input.enableCustomFields, input.solana_description, solanaMetadataPath)

        if input.erc721MetaData:
            if not os.path.exists(erc721MetadataPath):
                os.makedirs(erc721MetadataPath)
            createErc721MetaData(name, Order_Num, full_single_dna, dnaDictionary, metadataMaterialDict,
                                 input.custom_Fields,
                                 input.enableCustomFields, input.erc721_description, erc721MetadataPath)

        if not os.path.exists(BMNFT_metaData_Folder):
            os.makedirs(BMNFT_metaData_Folder)

        for b in dnaDictionary:
            if dnaDictionary[b] == "0":
                dnaDictionary[b] = "Empty"

        metaDataDict = {"name": name, "NFT_DNA": a, "NFT_Variants": dnaDictionary,
                        "Material_Attributes": metadataMaterialDict}

        jsonMetaData = json.dumps(metaDataDict, indent=1, ensure_ascii=True)

        with open(os.path.join(BMNFT_metaData_Folder, "Data_" + name + ".json"), 'w') as outfile:
            outfile.write(jsonMetaData + '\n')

        print(f"Completed {name} render in {time.time() - time_start_2}s")

        save_completed(full_single_dna, a, x, input.batch_json_save_path, input.batchToGenerate)

        x += 1

    for i in hierarchy:
        for j in hierarchy[i]:
            bpy.data.collections[j].hide_render = False
            bpy.data.collections[j].hide_viewport = False

    batch_complete_time = time.time() - time_start_1

    print(f"\nAll NFTs successfully generated and sent to {input.nftBatch_save_path}"
          f"\nCompleted all renders in Batch{input.batchToGenerate}.json in {batch_complete_time}s\n")

    batch_info = {"Batch Render Time": batch_complete_time, "Number of NFTs generated in Batch": x - 1,
                  "Average time per generation": batch_complete_time / (x - 1)}

    batch_infoFolder = os.path.join(input.nftBatch_save_path, "Batch" + str(input.batchToGenerate), "batch_info.json")
    save_batch(batch_info, batch_infoFolder)

    # Send an email when the Batch is complete:
    if input.emailNotificationBool:
        port = 465  # For SSL
        smtp_server = "smtp.gmail.com"
        sender_email = input.sender_from  # Enter your address
        receiver_email = input.receiver_to  # Enter receiver address
        password = input.email_password

        # Get batch info for the message:
        if input.fail_state:
            batch = input.failed_batch
            batchData = getBatchData(input.failed_batch, input.batch_json_save_path)

        else:
            batchData = getBatchData(input.batchToGenerate, input.batch_json_save_path)

            batch = input.batchToGenerate

        generation_time = str(datetime.timedelta(seconds=batch_complete_time))

        message = f"""\
Subject: Batch {batch} completed {x - 1} NFTs in {generation_time} (h:m:s)

Generation Time:
{generation_time.split(':')[0]} Hours, {generation_time.split(':')[1]} Minutes, {generation_time.split(':')[2]} Seconds
Batch Data:

    {batchData}

This message was sent from an instance of the Blend_My_NFTs Blender add-on.
"""

        context = ssl.create_default_context()
        with smtplib.SMTP_SSL(smtp_server, port, context=context) as server:
            server.login(sender_email, password)
            server.sendmail(sender_email, receiver_email, message)

    # Automatic Shutdown:
    # If the user selects automatic shutdown but did not specify a time after Batch completion
    def shutdown(time):
        system = platform.system()

        if system == "Windows":
            os.system(f"shutdown /s /t {time}")
        if system == "Darwin":
            os.system(f"shutdown /s /t {time}")

    if input.enableAutoShutdown and not input.specify_timeBool:
        shutdown(0)

    # If the user selects automatic shutdown and specifies a time after Batch completion
    if input.enableAutoShutdown and input.specify_timeBool:
        hours = int(input.hours) * 3600
        minutes = int(input.minutes) * 60
        total_sleep_time = hours + minutes

        # time.sleep(total_sleep_time)

        shutdown(total_sleep_time)
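For orientation, a sketch of the Batch#.json shape that getBatchData and save_completed read and write above, filled with placeholder values (the DNA strings and counts are illustrative):

    batch = {
        "NFTs_in_Batch": 2,
        "hierarchy": {},  # the same hierarchy dict that NFTRecord.json stores
        "BatchDNAList": [
            {"1-2": {"Complete": False, "Order_Num": 1}},
            {"2-1": {"Complete": False, "Order_Num": 2}},
        ],
    }

    # save_completed marks one DNA done and records progress:
    full_single_dna = "1-2"
    index = next(i for i, d in enumerate(batch["BatchDNAList"]) if full_single_dna in d)
    batch["BatchDNAList"][index][full_single_dna]["Complete"] = True
    print(batch["BatchDNAList"][index])  # {'1-2': {'Complete': True, 'Order_Num': 1}}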
@@ -1,79 +0,0 @@
import json
import bpy

from main import DNA_Generator, Exporter


def send_To_Record_JSON(input, reverse_order=False):
    if input.enableLogic:
        if input.enable_Logic_Json and input.logicFile:
            input.logicFile = json.load(open(input.logicFile))

        if input.enable_Logic_Json and not input.logicFile:
            print({'ERROR'}, "No Logic.json file path set. Please set the file path to your Logic.json file.")

        if not input.enable_Logic_Json:
            scn = bpy.context.scene
            if reverse_order:
                input.logicFile = {}
                num = 1
                for i in range(scn.logic_fields_index, -1, -1):
                    item = scn.logic_fields[i]

                    item_list1 = item.item_list1
                    rule_type = item.rule_type
                    item_list2 = item.item_list2
                    input.logicFile[f"Rule-{num}"] = {
                        "IF": item_list1.split(','),
                        rule_type: item_list2.split(',')
                    }
                    print(rule_type)
                    num += 1
            else:
                input.logicFile = {}
                num = 1
                for item in scn.logic_fields:
                    item_list1 = item.item_list1
                    rule_type = item.rule_type
                    item_list2 = item.item_list2
                    input.logicFile[f"Rule-{num}"] = {
                        "IF": item_list1.split(','),
                        rule_type: item_list2.split(',')
                    }
                    print(rule_type)

                    num += 1

    DNA_Generator.send_To_Record_JSON(input.collectionSize,
                                      input.nftsPerBatch,
                                      input.save_path,
                                      input.enableRarity,
                                      input.enableLogic,
                                      input.logicFile,
                                      input.enableMaterials,
                                      input.materialsFile,
                                      input.Blend_My_NFTs_Output,
                                      input.batch_json_save_path
                                      )


def render_and_save_NFTs(input, reverse_order=False):
    if input.enableCustomFields:
        scn = bpy.context.scene
        if reverse_order:
            for i in range(scn.custom_metadata_fields_index, -1, -1):
                item = scn.custom_metadata_fields[i]
                if item.field_name in list(input.custom_Fields.keys()):
                    raise ValueError(
                        f"A duplicate of '{item.field_name}' was found. Please ensure all Custom Metadata field names are unique.")
                else:
                    input.custom_Fields[item.field_name] = item.field_value
        else:
            for item in scn.custom_metadata_fields:
                if item.field_name in list(input.custom_Fields.keys()):
                    raise ValueError(
                        f"A duplicate of '{item.field_name}' was found. Please ensure all Custom Metadata field names are unique.")
                else:
                    input.custom_Fields[item.field_name] = item.field_value

    Exporter.render_and_save_NFTs(input)
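A small sketch of the logicFile dictionary the loop above builds from the UI rows; the rule types and collection names here are hypothetical, not taken from this diff:

    rows = [("Red_1_10,Blue_2_10", "NOT", "Hat_1_50"),      # (item_list1, rule_type, item_list2)
            ("Sphere_2_50", "Only With", "Crown_2_50")]

    logicFile = {}
    for num, (item_list1, rule_type, item_list2) in enumerate(rows, start=1):
        logicFile[f"Rule-{num}"] = {"IF": item_list1.split(','), rule_type: item_list2.split(',')}

    print(logicFile["Rule-1"])  # {'IF': ['Red_1_10', 'Blue_2_10'], 'NOT': ['Hat_1_50']}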
134 main/Material_Generator.py

@@ -1,134 +0,0 @@
# Purpose:
# The purpose of this file is to apply the materials a user sets in a given .json file to the Variant collection
# objects also specified in the .json file. The Materialized DNA is then returned in the following format: 1-1-1:1-1-1
# where the numbers right of the ":" are the material numbers applied to the respective Variants to the left of the ":"

import bpy

import json
import random
from .Constants import bcolors, removeList, remove_file_by_extension, save_result


def select_material(materialList, variant, enableRarity):
    """Selects a material from a passed material list."""
    material_List_Of_i = []  # List of Material names instead of order numbers
    rarity_List_Of_i = []
    ifZeroBool = None

    for material in materialList:
        # The Material Order Number comes from the index in the Material List in materials.json for a given Variant.
        # material_order_num = list(materialList.keys()).index(material)

        material_List_Of_i.append(material)

        material_rarity_percent = materialList[material]
        rarity_List_Of_i.append(float(material_rarity_percent))

    print(f"MATERIAL_LIST_OF_I:{material_List_Of_i}")
    print(f"RARITY_LIST_OF_I:{rarity_List_Of_i}")

    for b in rarity_List_Of_i:
        if b == 0:
            ifZeroBool = True
        elif b != 0:
            ifZeroBool = False

    if enableRarity:
        try:
            if ifZeroBool:
                selected_material = random.choices(material_List_Of_i, k=1)
            elif not ifZeroBool:
                selected_material = random.choices(material_List_Of_i, weights=rarity_List_Of_i, k=1)
        except IndexError:
            raise IndexError(
                f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
                f"An issue was found within the Material List of the Variant collection '{variant}'. For more information on Blend_My_NFTs compatible scenes, "
                f"see:\n{bcolors.RESET}"
                f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
            )
    else:
        try:
            selected_material = random.choices(material_List_Of_i, k=1)
        except IndexError:
            raise IndexError(
                f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
                f"An issue was found within the Material List of the Variant collection '{variant}'. For more information on Blend_My_NFTs compatible scenes, "
                f"see:\n{bcolors.RESET}"
                f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
            )

    return selected_material[0], materialList


def get_variant_att_index(variant, hierarchy):
    variant_attribute = None

    for attribute in hierarchy:
        for variant_h in hierarchy[attribute]:
            if variant_h == variant:
                variant_attribute = attribute

    attribute_index = list(hierarchy.keys()).index(variant_attribute)
    variant_order_num = variant.split("_")[1]
    return attribute_index, variant_order_num


def match_DNA_to_Variant(hierarchy, singleDNA):
    """
    Matches each DNA number separated by "-" to its attribute, then its variant.
    """

    listAttributes = list(hierarchy.keys())
    listDnaDeconstructed = singleDNA.split('-')
    dnaDictionary = {}

    for i, j in zip(listAttributes, listDnaDeconstructed):
        dnaDictionary[i] = j

    for x in dnaDictionary:
        for k in hierarchy[x]:
            kNum = hierarchy[x][k]["number"]
            if kNum == dnaDictionary[x]:
                dnaDictionary.update({x: k})
    return dnaDictionary


def apply_materials(hierarchy, singleDNA, materialsFile, enableRarity):
    """
    DNA with applied material example: "1-1:1-1" <Normal DNA>:<Selected Material for each Variant>

    The Material DNA will select the material for the Variant order number in the NFT DNA based on the Variant Material
    list in the Variant_Material.json file.
    """

    singleDNADict = match_DNA_to_Variant(hierarchy, singleDNA)
    materialsFile = json.load(open(materialsFile))
    deconstructed_MaterialDNA = {}

    for a in singleDNADict:
        complete = False
        for b in materialsFile:
            if singleDNADict[a] == b:
                material_name, materialList = select_material(materialsFile[b]['Material List'], b, enableRarity)
                material_order_num = list(materialList.keys()).index(material_name)  # Gets the Order Number of the Material
                deconstructed_MaterialDNA[a] = str(material_order_num + 1)
                complete = True
        if not complete:
            deconstructed_MaterialDNA[a] = "0"

    # This section is now incorrect and needs updating:

    # Make Attributes have the same materials:
    # Order your Attributes alphabetically, then assign each Attribute a number, starting with 0. So Attribute 'A' = 0,
    # Attribute 'B' = 1, 'C' = 2, 'D' = 3, etc. For each pair you want to equal another, add its number to this list:
    # synced_material_attributes = [1, 2]
    #
    # first_mat = deconstructed_MaterialDNA[synced_material_attributes[0]]
    # for i in synced_material_attributes:
    #     deconstructed_MaterialDNA[i] = first_mat

    material_DNA = ""
    for a in deconstructed_MaterialDNA:
        num = "-" + str(deconstructed_MaterialDNA[a])
        material_DNA += num
    material_DNA = ''.join(material_DNA.split('-', 1))

    return f"{singleDNA}:{material_DNA}"
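A minimal sketch of the combined DNA format described in the Purpose header above ("<variant DNA>:<material DNA>"), using an illustrative DNA string:

    full_single_dna = "1-2:2-0"
    single_dna, material_dna = full_single_dna.split(':')

    for variant_num, material_num in zip(single_dna.split('-'), material_dna.split('-')):
        if material_num == '0':
            print(f"variant {variant_num}: no material selected")
        else:
            print(f"variant {variant_num}: material #{material_num} from its Material List")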
143 main/Metadata.py
@ -1,143 +0,0 @@
|
|||
# Some code in this file was generously sponsored by the amazing team over at SolSweepers!
# Feel free to check out their amazing project and see how they are using Blend_My_NFTs:
# https://discord.gg/QTT7dzcuVs

# Purpose:
# This file returns the specified meta data format to the Exporter.py for a given NFT DNA.

import bpy
import os
import json


def sendMetaDataToJson(metaDataDict, save_path, file_name):
    jsonMetaData = json.dumps(metaDataDict, indent=1, ensure_ascii=True)
    with open(os.path.join(save_path, f"{file_name}.json"), 'w') as outfile:
        outfile.write(jsonMetaData + '\n')


def stripNums(variant):
    variant = str(variant).split('_')[0]
    return variant


# Cardano Template
def createCardanoMetadata(name, Order_Num, NFT_DNA, NFT_Variants, Material_Attributes,
                          custom_Fields, enableCustomFields, cardano_description, cardanoMetadataPath):

    metaDataDictCardano = {"721": {
        "<policy_id>": {
            name: {
                "name": name,
                "image": "<ipfs_link>",
                "mediaType": "<mime_type>",
                "description": cardano_description,
            }
        },
        "version": "1.0"
    }}

    # Variants and Attributes:
    for i in NFT_Variants:
        metaDataDictCardano["721"]["<policy_id>"][name][i] = stripNums(NFT_Variants[i])

    # Material Variants and Attributes:
    for i in Material_Attributes:
        metaDataDictCardano["721"]["<policy_id>"][name][i] = Material_Attributes[i]

    # Custom Fields:
    if enableCustomFields:
        for i in custom_Fields:
            metaDataDictCardano["721"]["<policy_id>"][name][i] = custom_Fields[i]

    sendMetaDataToJson(metaDataDictCardano, cardanoMetadataPath, name)


# Solana Template
def createSolanaMetaData(name, Order_Num, NFT_DNA, NFT_Variants, Material_Attributes, custom_Fields, enableCustomFields,
                         solana_description, solanaMetadataPath):
    metaDataDictSolana = {"name": name, "symbol": "", "description": solana_description, "seller_fee_basis_points": None,
                          "image": "", "animation_url": "", "external_url": ""}

    attributes = []

    # Variant and Attributes:
    for i in NFT_Variants:
        dictionary = {
            "trait_type": i,
            "value": stripNums(NFT_Variants[i])
        }
        attributes.append(dictionary)

    # Material Variants and Attributes:
    for i in Material_Attributes:
        dictionary = {
            "trait_type": i,
            "value": Material_Attributes[i]
        }
        attributes.append(dictionary)

    # Custom Fields:
    if enableCustomFields:
        for i in custom_Fields:
            dictionary = {
                "trait_type": i,
                "value": custom_Fields[i]
            }
            attributes.append(dictionary)

    metaDataDictSolana["attributes"] = attributes
    metaDataDictSolana["collection"] = {
        "name": "",
        "family": ""
    }

    metaDataDictSolana["properties"] = {
        "files": [{"uri": "", "type": ""}],
        "category": "",
        "creators": [{"address": "", "share": None}]
    }

    sendMetaDataToJson(metaDataDictSolana, solanaMetadataPath, name)


# ERC721 Template
def createErc721MetaData(name, Order_Num, NFT_DNA, NFT_Variants, Material_Attributes, custom_Fields, enableCustomFields,
                         erc721_description, erc721MetadataPath):
    metaDataDictErc721 = {
        "name": name,
        "description": erc721_description,
        "image": "",
        "attributes": None,
    }

    attributes = []

    # Variants and Attributes:
    for i in NFT_Variants:
        dictionary = {
            "trait_type": i,
            "value": stripNums(NFT_Variants[i])
        }

        attributes.append(dictionary)

    # Material Variants and Attributes:
    for i in Material_Attributes:
        dictionary = {
            "trait_type": i,
            "value": Material_Attributes[i]
        }

        attributes.append(dictionary)

    # Custom Fields:
    if enableCustomFields:
        for i in custom_Fields:
            dictionary = {
                "trait_type": i,
                "value": custom_Fields[i]
            }
            attributes.append(dictionary)

    metaDataDictErc721["attributes"] = attributes

    sendMetaDataToJson(metaDataDictErc721, erc721MetadataPath, name)
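
# Editor's sketch (not part of the add-on): the rough shape of the ERC721 JSON
# the template above writes, with hypothetical values; "image" is left blank
# for the minting pipeline to fill in.
example_erc721_output = {
    "name": "MyNFT_1",
    "description": "An example description.",
    "image": "",
    "attributes": [
        {"trait_type": "Background", "value": "Blue"},
    ],
}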
@ -1,50 +0,0 @@
# Purpose:
# This file sorts the Variants in DNA slots based on the rarity value set in the name.

import bpy
import random

from .Constants import bcolors, removeList, remove_file_by_extension


def createDNArarity(hierarchy):
    """
    Sorts through DataDictionary and appropriately weights each variant based on their rarity percentage set in Blender
    ("rarity" in DNA_Generator). Then
    """
    singleDNA = ""

    for i in hierarchy:
        number_List_Of_i = []
        rarity_List_Of_i = []
        ifZeroBool = None

        for k in hierarchy[i]:
            number = hierarchy[i][k]["number"]
            number_List_Of_i.append(number)

            rarity = hierarchy[i][k]["rarity"]
            rarity_List_Of_i.append(float(rarity))

        for x in rarity_List_Of_i:
            if x == 0:
                ifZeroBool = True
            elif x != 0:
                ifZeroBool = False

        try:
            if ifZeroBool:
                variantByNum = random.choices(number_List_Of_i, k=1)
            elif not ifZeroBool:
                variantByNum = random.choices(number_List_Of_i, weights=rarity_List_Of_i, k=1)
        except IndexError:
            raise IndexError(
                f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
                f"An issue was found within the Attribute collection '{i}'. For more information on Blend_My_NFTs compatible scenes, "
                f"see:\n{bcolors.RESET}"
                f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
            )

        singleDNA += "-" + str(variantByNum[0])
    singleDNA = ''.join(singleDNA.split('-', 1))
    return singleDNA
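
# Editor's sketch (not part of the add-on): the weighted pick above boils down
# to random.choices with per-variant weights (random is already imported above).
# With hypothetical rarities [70, 20, 10], variant "1" should appear ~70% of the time.
example_numbers = ["1", "2", "3"]
example_rarities = [70.0, 20.0, 10.0]
example_picks = random.choices(example_numbers, weights=example_rarities, k=10_000)
print({n: example_picks.count(n) for n in example_numbers})  # ~{'1': 7000, '2': 2000, '3': 1000}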
@ -1,48 +0,0 @@
# Purpose:
# This file goes through all batches, renames, and sorts all nft files to a Complete_Collection folder in Blend_My_NFTs

import bpy
import os
import json
import shutil

from .Constants import bcolors, removeList, remove_file_by_extension


def reformatNFTCollection(refactor_panel_input):
    completeCollPath = os.path.join(refactor_panel_input.save_path, "Blend_My_NFTs Output", "Complete_Collection")

    if not os.path.exists(completeCollPath):
        os.mkdir(completeCollPath)

    batchListDirty = os.listdir(refactor_panel_input.nftBatch_save_path)
    batchList = remove_file_by_extension(batchListDirty)
    collection_info = {"Total Time": 0}

    for folder in batchList:
        batch_info = json.load(open(os.path.join(refactor_panel_input.nftBatch_save_path, folder, "batch_info.json")))
        collection_info[os.path.basename(folder)] = batch_info
        collection_info["Total Time"] = collection_info["Total Time"] + batch_info["Batch Render Time"]

        fileListDirty = os.listdir(os.path.join(refactor_panel_input.nftBatch_save_path, folder))
        filelist = remove_file_by_extension(fileListDirty)

        for mediaTypeFolder in filelist:
            if mediaTypeFolder != "batch_info.json":
                mediaTypeFolderDir = os.path.join(refactor_panel_input.nftBatch_save_path, folder, mediaTypeFolder)

                for i in os.listdir(mediaTypeFolderDir):
                    destination = os.path.join(completeCollPath, mediaTypeFolder)
                    if not os.path.exists(destination):
                        os.makedirs(destination)

                    shutil.move(os.path.join(mediaTypeFolderDir, i), destination)

    collection_info = json.dumps(collection_info, indent=1, ensure_ascii=True)
    with open(os.path.join(completeCollPath, "collection_info.json"), 'w') as outfile:
        outfile.write(collection_info + '\n')

    print(f"All NFT files stored and sorted to the Complete_Collection folder in {refactor_panel_input.save_path}")

    shutil.rmtree(refactor_panel_input.nftBatch_save_path)
@ -0,0 +1,359 @@
# Purpose:
# This file generates NFT DNA based on a .blend file scene structure and exports NFTRecord.json.

import os
import time
import json
import random
import logging
import traceback

from . import logic, material_generator, helpers
from .helpers import TextColors

log = logging.getLogger(__name__)


def generate_nft_dna(
        collection_size,
        enable_rarity,
        enable_logic,
        logic_file,
        enable_materials,
        materials_file,
):
    """
    Returns data_dictionary containing the number of NFT combinations, hierarchy, and the dna_list.
    """

    hierarchy = helpers.get_hierarchy()

    # DNA random, Rarity and Logic methods:
    data_dictionary = {}

    def create_dna_random(hierarchy):
        """Creates a single DNA randomly without Rarity or Logic."""
        dna_str = ""
        dna_str_list = []
        list_option_variant = []

        for i in hierarchy:
            num_child = len(hierarchy[i])
            possible_nums = list(range(1, num_child + 1))
            list_option_variant.append(possible_nums)

        for i in list_option_variant:
            random_variant_num = random.choices(i, k=1)
            str1 = ''.join(str(e) for e in random_variant_num)
            dna_str_list.append(str1)

        for i in dna_str_list:
            num = "-" + str(i)
            dna_str += num

        dna = ''.join(dna_str.split('-', 1))

        return str(dna)

    def create_dna_rarity(hierarchy):
        """
        Sorts through the hierarchy and appropriately weights each variant based on the rarity percentage set in
        Blender ("rarity" in dna_generator), then picks one variant per attribute using those weights.
        """
        single_dna = ""

        for i in hierarchy:
            number_list_of_i = []
            rarity_list_of_i = []
            if_zero_bool = None

            for k in hierarchy[i]:
                number = hierarchy[i][k]["number"]
                number_list_of_i.append(number)

                rarity = hierarchy[i][k]["rarity"]
                rarity_list_of_i.append(float(rarity))

            for x in rarity_list_of_i:
                if x == 0:
                    if_zero_bool = True
                elif x != 0:
                    if_zero_bool = False

            try:
                if if_zero_bool:
                    variant_by_num = random.choices(number_list_of_i, k=1)
                elif not if_zero_bool:
                    variant_by_num = random.choices(number_list_of_i, weights=rarity_list_of_i, k=1)
            except IndexError:
                raise IndexError(
                    f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
                    f"An issue was found within the Attribute collection '{i}'. For more information on Blend_My_NFTs "
                    f"compatible scenes, see:\n{TextColors.RESET}"
                    f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
                )

            single_dna += "-" + str(variant_by_num[0])
        single_dna = ''.join(single_dna.split('-', 1))
        return single_dna

    def single_complete_dna():
        """
        Applies Rarity and Logic to a single DNA created by create_dna_random() if Rarity or Logic is specified.
        """

        single_dna = ""
        if not enable_rarity:
            single_dna = create_dna_random(hierarchy)
            log.debug(
                f"\n================"
                f"\nOriginal DNA: {single_dna}"
            )

        if enable_rarity:
            single_dna = create_dna_rarity(hierarchy)
            log.debug(
                f"\n================"
                f"\nRarity DNA: {single_dna}"
            )

        if enable_logic:
            single_dna = logic.logicafy_dna_single(hierarchy, single_dna, logic_file, enable_rarity)
            log.debug(
                f"\n================"
                f"\nLogic DNA: {single_dna}"
            )

        if enable_materials:
            single_dna = material_generator.apply_materials(hierarchy, single_dna, materials_file, enable_rarity)
            log.debug(
                f"\n================"
                f"\nMaterials DNA: {single_dna}"
                f"\n================\n"
            )

        return single_dna
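
    # Editor's note: with everything enabled, a DNA string passes through the
    # stages above in order, e.g. (hypothetical values):
    #   random or rarity ->  "3-1-2"
    #   logic            ->  "3-1-1"         (rules may swap variants)
    #   materials        ->  "3-1-1:2-0-1"   (material DNA appended after ':')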

    def create_dna_list():
        """
        Creates dna_list. Loops through single_complete_dna() (random or rarity-weighted, plus Logic) while checking
        that all DNA are unique.
        """
        dna_set_return = set()

        for i in range(collection_size):
            # Top up the set until it holds collection_size unique DNA strings;
            # the set silently discards any duplicates that are generated.
            dna_set_return |= {single_complete_dna() for _ in range(collection_size - len(dna_set_return))}

        dna_list_non_formatted = list(dna_set_return)

        dna_list_formatted = []
        dna_counter = 1
        for i in dna_list_non_formatted:
            dna_list_formatted.append({
                i: {
                    "complete": False,
                    "order_num": dna_counter
                }
            })

            dna_counter += 1

        return dna_list_formatted

    dna_list = create_dna_list()

    helpers.raise_warning_collection_size(dna_list, collection_size)

    # Data stored in data_dictionary:
    data_dictionary["num_nfts_generated"] = len(dna_list)
    data_dictionary["hierarchy"] = hierarchy
    data_dictionary["dna_list"] = dna_list

    return data_dictionary
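
# Editor's sketch (not part of the add-on): the shape of the dictionary
# generate_nft_dna() returns, with hypothetical values:
#
#     {
#         "num_nfts_generated": 2,
#         "hierarchy": {"Background": {"Blue_1_50": {...}, "Red_2_50": {...}}},
#         "dna_list": [
#             {"1-3": {"complete": False, "order_num": 1}},
#             {"2-1": {"complete": False, "order_num": 2}},
#         ],
#     }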


def make_batches(
        collection_size,
        nfts_per_batch,
        save_path,
        batch_json_save_path
):
    """
    Sorts through all the DNA and outputs a given number of batches depending on collection_size and nfts_per_batch.
    These files are then saved as Batch#.json files to batch_json_save_path.
    """

    # Clears the Batch Data folder of Batches:
    batch_list = os.listdir(batch_json_save_path)
    if batch_list:
        for i in batch_list:
            batch = os.path.join(batch_json_save_path, i)
            if os.path.exists(batch):
                os.remove(
                    os.path.join(batch_json_save_path, i)
                )

    blend_my_nfts_output = os.path.join(save_path, "Blend_My_NFTs Output", "NFT_Data")
    nft_record_save_path = os.path.join(blend_my_nfts_output, "NFTRecord.json")
    data_dictionary = json.load(open(nft_record_save_path))

    hierarchy = data_dictionary["hierarchy"]
    dna_list = data_dictionary["dna_list"]

    num_batches = collection_size // nfts_per_batch
    remainder_dna = collection_size % nfts_per_batch
    if remainder_dna > 0:
        num_batches += 1

    log.info(
        f"\nGenerating {num_batches} batch files. If the last batch isn't completely filled, the program will still "
        f"operate normally."
    )

    batches_dna_list = []

    for i in range(num_batches):
        if i != num_batches - 1:
            batch_dna_list = list(dna_list[0:nfts_per_batch])
            batches_dna_list.append(batch_dna_list)

            dna_list = [x for x in dna_list if x not in batch_dna_list]
        else:
            batch_dna_list = dna_list

        batch_dictionary = {
            "nfts_in_batch": int(len(batch_dna_list)),
            "hierarchy": hierarchy,
            "batch_dna_list": batch_dna_list
        }

        batch_dictionary = json.dumps(batch_dictionary, indent=1, ensure_ascii=True)

        with open(os.path.join(batch_json_save_path, f"Batch{i + 1}.json"), "w") as outfile:
            outfile.write(batch_dictionary)
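
# Editor's sketch (not part of the add-on): the batch count above is a ceiling
# division. For example, 100 NFTs at 30 per batch:
example_collection_size = 100
example_nfts_per_batch = 30
example_num_batches = example_collection_size // example_nfts_per_batch  # 3
if example_collection_size % example_nfts_per_batch > 0:
    example_num_batches += 1  # 4 batches; the last one holds the 10 leftover NFTs
print(example_num_batches)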


def send_to_record(
        collection_size,
        nfts_per_batch,
        save_path,
        enable_rarity,
        enable_logic,
        logic_file,
        enable_materials,
        materials_file,
        blend_my_nfts_output,
        batch_json_save_path,
        enable_debug,
        log_path
):
    """
    Creates the NFTRecord.json file and sends the data dictionary to it. NFTRecord.json is a permanent record of all
    DNA you've generated with all attribute variants. If you add new variants or attributes to your .blend file, other
    scripts need to reference this .json file to generate new DNA and make note of the new attributes and variants to
    prevent repeat DNA.
    """

    # Checking Scene is compatible with BMNFTs:
    helpers.check_scene()

    # Messages:
    log.info(
        f"\n{TextColors.OK}======== Creating NFT Data ({collection_size} DNA) ========{TextColors.RESET}"
    )

    if not enable_rarity and not enable_logic:
        log.info(
            f"\n - NFT DNA will be determined randomly, no special properties or parameters are "
            f"applied."
        )

    if enable_rarity:
        log.info(
            f"\n - Rarity is ON. Weights listed in .blend scene will be taken into account."
        )

    if enable_logic:
        log.info(
            f"\n - Logic is ON. {len(list(logic_file.keys()))} rules detected, implementation will "
            f"be attempted."
        )

    if enable_materials:
        log.info(
            f"\n - Materials are ON. {len(list(json.load(open(materials_file)).keys()))} material "
            f"instances detected, implementation will be attempted."
        )
    time_start = time.time()

    def create_nft_data():
        try:
            data_dictionary = generate_nft_dna(
                collection_size,
                enable_rarity,
                enable_logic,
                logic_file,
                enable_materials,
                materials_file,
            )
            nft_record_save_path = os.path.join(blend_my_nfts_output, "NFTRecord.json")

            # Checks:
            helpers.raise_warning_max_nfts(nfts_per_batch, collection_size)
            helpers.check_duplicates(data_dictionary["dna_list"])
            helpers.raise_error_zero_combinations()

            if enable_rarity:
                helpers.check_rarity(data_dictionary["hierarchy"], data_dictionary["dna_list"],
                                     os.path.join(save_path, "Blend_My_NFTs Output/NFT_Data"))

        except FileNotFoundError:
            log.error(
                f"\n{traceback.format_exc()}"
                f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
                f"Data not saved to NFTRecord.json, file not found. Check that your save path, logic file path, or "
                f"materials file path is correct. For more information, see:\n{TextColors.RESET}"
                f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
            )
            raise

        finally:
            loading.stop()

        try:
            ledger = json.dumps(data_dictionary, indent=1, ensure_ascii=True)
            with open(nft_record_save_path, 'w') as outfile:
                outfile.write(ledger + '\n')

            log.info(
                f"\n{TextColors.OK}{len(data_dictionary['dna_list'])} NFT data successfully saved to:"
                f"\n{nft_record_save_path}{TextColors.RESET}"
            )

        except Exception:
            log.error(
                f"\n{traceback.format_exc()}"
                f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
                f"Data not saved to NFTRecord.json. Please review your Blender scene and ensure it follows "
                f"the naming conventions and scene structure. For more information, "
                f"see:\n{TextColors.RESET}"
                f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
            )
            raise

    # Loading Animation:
    loading = helpers.Loader(f'\nCreating NFT DNA...', '').start()
    create_nft_data()
    make_batches(collection_size, nfts_per_batch, save_path, batch_json_save_path)
    loading.stop()

    time_end = time.time()

    log.info(
        f"\n{TextColors.OK}TIME [Created and Saved NFT data]: {time_end - time_start}s.\n{TextColors.RESET}"
    )
@ -0,0 +1,670 @@
# Purpose:
# This file takes a given Batch created by dna_generator.py and tells blender to render the image or export a 3D model
# to the NFT_Output folder.

import bpy
import os
import ssl
import time
import json
import smtplib
import logging
import datetime
import platform
import traceback

from .helpers import TextColors, Loader
from .metadata_templates import create_cardano_metadata, createSolanaMetaData, create_erc721_meta_data

log = logging.getLogger(__name__)


# Save info
def save_batch(batch, file_name):
    saved_batch = json.dumps(batch, indent=1, ensure_ascii=True)

    with open(os.path.join(file_name), 'w') as outfile:
        outfile.write(saved_batch + '\n')


def save_generation_state(input):
    """
    Saves date and time of generation start, and generation types; Images, Animations, 3D Models, and the file types
    for each.
    """
    file_name = os.path.join(input.batch_json_save_path, "Batch{}.json".format(input.batch_to_generate))
    batch = json.load(open(file_name))

    current_time = datetime.datetime.now().strftime("%H:%M:%S")
    current_date = datetime.datetime.now().strftime("%d/%m/%Y")
    local_timezone = str(datetime.datetime.now(datetime.timezone.utc))

    if "Generation Save" in batch:
        batch_save_number = int(batch["Generation Save"].index(batch["Generation Save"][-1]))
    else:
        batch_save_number = 0

    batch["Generation Save"] = list()
    batch["Generation Save"].append({
        "Batch Save Number": batch_save_number + 1,
        "DNA Generated": None,
        "Generation Start Date and Time": [current_time, current_date, local_timezone],
        "Render_Settings": {
            "nft_name": input.nft_name,
            "save_path": input.save_path,
            "nfts_per_batch": input.nfts_per_batch,
            "batch_to_generate": input.batch_to_generate,
            "collection_size": input.collection_size,

            "blend_my_nfts_output": input.blend_my_nfts_output,
            "batch_json_save_path": input.batch_json_save_path,
            "nft_batch_save_path": input.nft_batch_save_path,

            "enable_images": input.enable_images,
            "image_file_format": input.image_file_format,

            "enable_animations": input.enable_animations,
            "animation_file_format": input.animation_file_format,

            "enable_models": input.enable_models,
            "model_file_format": input.model_file_format,

            "enable_custom_fields": input.enable_custom_fields,

            "cardano_metadata_bool": input.cardano_metadata_bool,
            "solana_metadata_bool": input.solana_metadata_bool,
            "erc721_metadata": input.erc721_metadata,

            "cardano_description": input.cardano_description,
            "solana_description": input.solana_description,
            "erc721_description": input.erc721_description,

            "enable_materials": input.enable_materials,
            "materials_file": input.materials_file,

            "enable_logic": input.enable_logic,
            "enable_logic_json": input.enable_logic_json,
            "logic_file": input.logic_file,

            "enable_rarity": input.enable_rarity,

            "enable_auto_shutdown": input.enable_auto_shutdown,

            "specify_time_bool": input.specify_time_bool,
            "hours": input.hours,
            "minutes": input.minutes,

            "email_notification_bool": input.email_notification_bool,
            "sender_from": input.sender_from,
            "email_password": input.email_password,
            "receiver_to": input.receiver_to,

            "enable_debug": input.enable_debug,
            "log_path": input.log_path,

            "enable_dry_run": input.enable_dry_run,

            "custom_fields": input.custom_fields,
        },
    })

    save_batch(batch, file_name)


def save_completed(full_single_dna, a, x, batch_json_save_path, batch_to_generate):
    """Saves progress of rendering to batch.json file."""

    file_name = os.path.join(batch_json_save_path, "Batch{}.json".format(batch_to_generate))
    batch = json.load(open(file_name))
    index = batch["batch_dna_list"].index(a)
    batch["batch_dna_list"][index][full_single_dna]["complete"] = True
    batch["Generation Save"][-1]["DNA Generated"] = x

    save_batch(batch, file_name)


# Exporter functions:
def get_batch_data(batch_to_generate, batch_json_save_path):
    """
    Retrieves a given batch's data, as selected by batch_to_generate.
    """

    file_name = os.path.join(batch_json_save_path, "Batch{}.json".format(batch_to_generate))
    batch = json.load(open(file_name))

    nfts_in_batch = batch["nfts_in_batch"]
    hierarchy = batch["hierarchy"]
    batch_dna_list = batch["batch_dna_list"]

    return nfts_in_batch, hierarchy, batch_dna_list
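
# Editor's sketch (not part of the add-on): the minimal shape of a Batch#.json
# file that get_batch_data() expects, with hypothetical values:
#
#     {
#         "nfts_in_batch": 2,
#         "hierarchy": {"Background": {"Blue_1_50": {...}}},
#         "batch_dna_list": [
#             {"1-3": {"complete": false, "order_num": 1}},
#             {"2-1": {"complete": false, "order_num": 2}}
#         ]
#     }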


def render_and_save_nfts(input):
    """
    Renders the NFT DNA in a Batch#.json, where # is input.batch_to_generate. Turns off the viewport camera and
    the render camera for all items in hierarchy.
    """

    time_start_1 = time.time()

    # If failed Batch is detected and user is resuming its generation:
    if input.fail_state:
        log.info(
            f"{TextColors.OK}\nResuming Batch #{input.failed_batch}{TextColors.RESET}"
        )
        nfts_in_batch, hierarchy, batch_dna_list = get_batch_data(input.failed_batch, input.batch_json_save_path)
        for a in range(input.failed_dna):
            del batch_dna_list[0]
        x = input.failed_dna + 1

    # If user is generating the normal way:
    else:
        log.info(
            f"{TextColors.OK}\n======== Generating Batch #{input.batch_to_generate} ========{TextColors.RESET}"
        )
        nfts_in_batch, hierarchy, batch_dna_list = get_batch_data(input.batch_to_generate, input.batch_json_save_path)
        save_generation_state(input)
        x = 1

    if input.enable_materials:
        materials_file = json.load(open(input.materials_file))

    for a in batch_dna_list:
        full_single_dna = list(a.keys())[0]
        order_num = a[full_single_dna]['order_num']

        # Material handling:
        if input.enable_materials:
            single_dna, material_dna = full_single_dna.split(':')
        else:
            single_dna = full_single_dna

        def match_dna_to_variant(single_dna):
            """
            Matches each DNA number separated by "-" to its attribute, then its variant.
            """

            list_attributes = list(hierarchy.keys())
            list_dna_deconstructed = single_dna.split('-')
            dna_dictionary = {}

            for i, j in zip(list_attributes, list_dna_deconstructed):
                dna_dictionary[i] = j

            for x in dna_dictionary:
                for k in hierarchy[x]:
                    k_num = hierarchy[x][k]["number"]
                    if k_num == dna_dictionary[x]:
                        dna_dictionary.update({x: k})
            return dna_dictionary
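
        # Editor's worked example (hypothetical hierarchy): given
        #   hierarchy = {"Background": {"Blue_1_50": {"number": "1"},
        #                               "Red_2_50": {"number": "2"}}}
        # match_dna_to_variant("2") returns {"Background": "Red_2_50"}.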

        def match_material_dna_to_material(single_dna, material_dna, materials_file):
            """
            Matches the Material DNA to its selected Materials, unless a 0 is present, meaning no material was
            selected for that variant.
            """
            list_attributes = list(hierarchy.keys())
            list_dna_deconstructed = single_dna.split('-')
            list_material_dna_deconstructed = material_dna.split('-')

            full_dna_dict = {}

            for attribute, variant, material in zip(
                    list_attributes,
                    list_dna_deconstructed,
                    list_material_dna_deconstructed
            ):

                for var in hierarchy[attribute]:
                    if hierarchy[attribute][var]['number'] == variant:
                        variant = var

                if material != '0':  # If material is not empty
                    for variant_m in materials_file:
                        if variant == variant_m:
                            # Getting Materials name from Materials index in the Materials List
                            materials_list = list(materials_file[variant_m]["Material List"].keys())

                            material = materials_list[int(material) - 1]  # Subtract 1 because '0' means empty mat
                            break

                full_dna_dict[variant] = material

            return full_dna_dict

        metadata_material_dict = {}

        if input.enable_materials:
            material_dna_dictionary = match_material_dna_to_material(single_dna, material_dna, materials_file)

            for var_mat in list(material_dna_dictionary.keys()):
                if material_dna_dictionary[var_mat] != '0':
                    if not materials_file[var_mat]['Variant Objects']:
                        # If the objects to apply the material to are not specified, apply it to all
                        # objects in the Variant collection.
                        metadata_material_dict[var_mat] = material_dna_dictionary[var_mat]

                        for obj in bpy.data.collections[var_mat].all_objects:
                            selected_object = bpy.data.objects.get(obj.name)
                            selected_object.active_material = bpy.data.materials[material_dna_dictionary[var_mat]]

                    if materials_file[var_mat]['Variant Objects']:
                        # If the objects to apply the material to are specified, apply it only to the objects
                        # listed within the Variant collection.
                        metadata_material_dict[var_mat] = material_dna_dictionary[var_mat]

                        for obj in materials_file[var_mat]['Variant Objects']:
                            selected_object = bpy.data.objects.get(obj)
                            selected_object.active_material = bpy.data.materials[material_dna_dictionary[var_mat]]

        # Turn off render camera and viewport camera for all collections in hierarchy
        for i in hierarchy:
            for j in hierarchy[i]:
                try:
                    bpy.data.collections[j].hide_render = True
                    bpy.data.collections[j].hide_viewport = True
                except KeyError:
                    log.error(
                        f"\n{traceback.format_exc()}"
                        f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
                        f"The Collection '{j}' appears to be missing or has been renamed. If you made any changes "
                        f"to your .blend file scene, ensure you re-create your NFT Data so Blend_My_NFTs can read "
                        f"your scene. For more information see:{TextColors.RESET}"
                        f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
                    )
                    raise TypeError()

        dna_dictionary = match_dna_to_variant(single_dna)
        name = input.nft_name + "_" + str(order_num)

        # Change Text Object in Scene to match DNA string:
        # Variables that can be used: full_single_dna, name, order_num
        # ob = bpy.data.objects['Text']  # Object name
        # ob.data.body = str(f"DNA: {full_single_dna}")  # Set text of Text Object ob

        log.info(
            f"\n{TextColors.OK}======== Generating NFT {x}/{nfts_in_batch}: {name} ========{TextColors.RESET}"
            f"\nVariants selected:"
            f"\n{dna_dictionary}"
        )
        if input.enable_materials:
            log.info(
                f"\nMaterials selected:"
                f"\n{material_dna_dictionary}"
            )

        log.info(f"\nDNA Code: {full_single_dna}")

        for c in dna_dictionary:
            collection = dna_dictionary[c]
            if collection != '0':
                bpy.data.collections[collection].hide_render = False
                bpy.data.collections[collection].hide_viewport = False

        time_start_2 = time.time()

        # Main paths for batch sub-folders:
        batch_folder = os.path.join(input.nft_batch_save_path, "Batch" + str(input.batch_to_generate))

        image_folder = os.path.join(batch_folder, "Images")
        animation_folder = os.path.join(batch_folder, "Animations")
        model_folder = os.path.join(batch_folder, "Models")
        bmnft_data_folder = os.path.join(batch_folder, "BMNFT_data")

        image_path = os.path.join(image_folder, name)
        animation_path = os.path.join(animation_folder, name)
        model_path = os.path.join(model_folder, name)

        cardano_metadata_path = os.path.join(batch_folder, "Cardano_metadata")
        solana_metadata_path = os.path.join(batch_folder, "Solana_metadata")
        erc721_metadata_path = os.path.join(batch_folder, "Erc721_metadata")

        def check_failed_exists(file_path):
            """
            Delete a file if a fail state is detected and the file being re-generated already exists. Prevents
            animations from corrupting.
            """
            if input.fail_state:
                if os.path.exists(file_path):
                    os.remove(file_path)

        # Generation/Rendering:
        if input.enable_images:

            log.info(f"\n{TextColors.OK}-------- Image --------{TextColors.RESET}")

            image_render_time_start = time.time()

            check_failed_exists(image_path)

            def render_image():
                if not os.path.exists(image_folder):
                    os.makedirs(image_folder)

                bpy.context.scene.render.filepath = image_path
                bpy.context.scene.render.image_settings.file_format = input.image_file_format

                if not input.enable_debug:
                    bpy.ops.render.render(write_still=True)

            # Loading Animation:
            loading = Loader(f'Rendering Image {x}/{nfts_in_batch}...', '').start()
            render_image()
            loading.stop()

            image_render_time_end = time.time()

            log.info(
                f"{TextColors.OK}TIME [Rendered Image]: {image_render_time_end - image_render_time_start}s."
                f"\n{TextColors.RESET}"
            )

        if input.enable_animations:
            log.info(f"\n{TextColors.OK}-------- Animation --------{TextColors.RESET}")

            animation_render_time_start = time.time()

            check_failed_exists(animation_path)

            def render_animation():
                if not os.path.exists(animation_folder):
                    os.makedirs(animation_folder)

                if not input.enable_debug:
                    if input.animation_file_format == 'MP4':
                        bpy.context.scene.render.filepath = animation_path
                        bpy.context.scene.render.image_settings.file_format = "FFMPEG"

                        bpy.context.scene.render.ffmpeg.format = 'MPEG4'
                        bpy.context.scene.render.ffmpeg.codec = 'H264'
                        bpy.ops.render.render(animation=True)

                    elif input.animation_file_format == 'PNG':
                        if not os.path.exists(animation_path):
                            os.makedirs(animation_path)

                        bpy.context.scene.render.filepath = os.path.join(animation_path, name)
                        bpy.context.scene.render.image_settings.file_format = input.animation_file_format
                        bpy.ops.render.render(animation=True)

                    elif input.animation_file_format == 'TIFF':
                        if not os.path.exists(animation_path):
                            os.makedirs(animation_path)

                        bpy.context.scene.render.filepath = os.path.join(animation_path, name)
                        bpy.context.scene.render.image_settings.file_format = input.animation_file_format
                        bpy.ops.render.render(animation=True)

                    else:
                        bpy.context.scene.render.filepath = animation_path
                        bpy.context.scene.render.image_settings.file_format = input.animation_file_format
                        bpy.ops.render.render(animation=True)

            # Loading Animation:
            loading = Loader(f'Rendering Animation {x}/{nfts_in_batch}...', '').start()
            render_animation()
            loading.stop()

            animation_render_time_end = time.time()

            log.info(
                f"\n{TextColors.OK}TIME [Rendered Animation]: "
                f"{animation_render_time_end - animation_render_time_start}s.{TextColors.RESET}"
            )

        if input.enable_models:
            log.info(f"\n{TextColors.OK}-------- 3D Model --------{TextColors.RESET}")

            model_generation_time_start = time.time()

            def generate_models():
                if not os.path.exists(model_folder):
                    os.makedirs(model_folder)

                for i in dna_dictionary:
                    coll = dna_dictionary[i]
                    if coll != '0':
                        for obj in bpy.data.collections[coll].all_objects:
                            obj.select_set(True)

                for obj in bpy.data.collections['Script_Ignore'].all_objects:
                    obj.select_set(True)

                # Remove objects from 3D model export:
                # remove_objects: list = [
                # ]
                #
                # for obj in bpy.data.objects:
                #     if obj.name in remove_objects:
                #         obj.select_set(False)

                if not input.enable_debug:
                    if input.model_file_format == 'GLB':
                        check_failed_exists(f"{model_path}.glb")
                        bpy.ops.export_scene.gltf(
                            filepath=f"{model_path}.glb",
                            check_existing=True,
                            export_format='GLB',
                            export_keep_originals=True,
                            use_selection=True
                        )
                    if input.model_file_format == 'GLTF_SEPARATE':
                        check_failed_exists(f"{model_path}.gltf")
                        check_failed_exists(f"{model_path}.bin")
                        bpy.ops.export_scene.gltf(
                            filepath=f"{model_path}",
                            check_existing=True,
                            export_format='GLTF_SEPARATE',
                            export_keep_originals=True,
                            use_selection=True
                        )
                    if input.model_file_format == 'GLTF_EMBEDDED':
                        check_failed_exists(f"{model_path}.gltf")
                        bpy.ops.export_scene.gltf(
                            filepath=f"{model_path}.gltf",
                            check_existing=True,
                            export_format='GLTF_EMBEDDED',
                            export_keep_originals=True,
                            use_selection=True
                        )
                    elif input.model_file_format == 'FBX':
                        check_failed_exists(f"{model_path}.fbx")
                        bpy.ops.export_scene.fbx(
                            filepath=f"{model_path}.fbx",
                            check_existing=True,
                            use_selection=True
                        )
                    elif input.model_file_format == 'OBJ':
                        check_failed_exists(f"{model_path}.obj")
                        bpy.ops.export_scene.obj(
                            filepath=f"{model_path}.obj",
                            check_existing=True,
                            use_selection=True,
                        )
                    elif input.model_file_format == 'X3D':
                        check_failed_exists(f"{model_path}.x3d")
                        bpy.ops.export_scene.x3d(
                            filepath=f"{model_path}.x3d",
                            check_existing=True,
                            use_selection=True
                        )
                    elif input.model_file_format == 'STL':
                        check_failed_exists(f"{model_path}.stl")
                        bpy.ops.export_mesh.stl(
                            filepath=f"{model_path}.stl",
                            check_existing=True,
                            use_selection=True
                        )
                    elif input.model_file_format == 'VOX':
                        check_failed_exists(f"{model_path}.vox")
                        bpy.ops.export_vox.some_data(filepath=f"{model_path}.vox")

            # Loading Animation:
            loading = Loader(f'Generating 3D model {x}/{nfts_in_batch}...', '').start()
            generate_models()
            loading.stop()

            model_generation_time_end = time.time()

            log.info(
                f"\n{TextColors.OK}TIME [Generated 3D Model]: "
                f"{model_generation_time_end - model_generation_time_start}s.{TextColors.RESET}"
            )

        # Generating Metadata:
        if input.cardano_metadata_bool:
            if not os.path.exists(cardano_metadata_path):
                os.makedirs(cardano_metadata_path)
            create_cardano_metadata(
                name,
                order_num,
                full_single_dna,
                dna_dictionary,
                metadata_material_dict,
                input.custom_fields,
                input.enable_custom_fields,
                input.cardano_description,
                cardano_metadata_path
            )

        if input.solana_metadata_bool:
            if not os.path.exists(solana_metadata_path):
                os.makedirs(solana_metadata_path)
            createSolanaMetaData(
                name,
                order_num,
                full_single_dna,
                dna_dictionary,
                metadata_material_dict,
                input.custom_fields,
                input.enable_custom_fields,
                input.solana_description,
                solana_metadata_path
            )

        if input.erc721_metadata:
            if not os.path.exists(erc721_metadata_path):
                os.makedirs(erc721_metadata_path)
            create_erc721_meta_data(
                name,
                order_num,
                full_single_dna,
                dna_dictionary,
                metadata_material_dict,
                input.custom_fields,
                input.enable_custom_fields,
                input.erc721_description,
                erc721_metadata_path
            )

        if not os.path.exists(bmnft_data_folder):
            os.makedirs(bmnft_data_folder)

        for b in dna_dictionary:
            if dna_dictionary[b] == "0":
                dna_dictionary[b] = "Empty"

        meta_data_dict = {
            "name": name,
            "nft_dna": a,
            "nft_variants": dna_dictionary,
            "material_attributes": metadata_material_dict
        }

        json_meta_data = json.dumps(meta_data_dict, indent=1, ensure_ascii=True)

        with open(os.path.join(bmnft_data_folder, "Data_" + name + ".json"), 'w') as outfile:
            outfile.write(json_meta_data + '\n')

        log.info(f"{TextColors.OK}\nTIME [NFT {name} Generated]: {time.time() - time_start_2}s")

        save_completed(full_single_dna, a, x, input.batch_json_save_path, input.batch_to_generate)

        x += 1

    for i in hierarchy:
        for j in hierarchy[i]:
            bpy.data.collections[j].hide_render = False
            bpy.data.collections[j].hide_viewport = False

    batch_complete_time = time.time() - time_start_1

    log.info(
        f"\nAll NFTs in Batch {input.batch_to_generate} successfully generated and saved at:"
        f"\n{input.nft_batch_save_path}"
        f"\nTIME [Batch {input.batch_to_generate} Generated]: {batch_complete_time}s\n"
    )

    batch_info = {"Batch Render Time": batch_complete_time, "Number of NFTs generated in Batch": x - 1,
                  "Average time per generation": batch_complete_time / (x - 1)}

    batch_info_folder = os.path.join(
        input.nft_batch_save_path,
        "Batch" + str(input.batch_to_generate),
        "batch_info.json"
    )

    save_batch(batch_info, batch_info_folder)

    # Send Email that Batch is complete:
    if input.email_notification_bool:
        port = 465  # For SSL
        smtp_server = "smtp.gmail.com"
        sender_email = input.sender_from  # Enter your address
        receiver_email = input.receiver_to  # Enter receiver address
        password = input.email_password

        # Get batch info for message:
        if input.fail_state:
            batch = input.failed_batch
            batch_data = get_batch_data(input.failed_batch, input.batch_json_save_path)

        else:
            batch_data = get_batch_data(input.batch_to_generate, input.batch_json_save_path)

            batch = input.batch_to_generate

        generation_time = str(datetime.timedelta(seconds=batch_complete_time))

        message = f"""\
Subject: Batch {batch} completed {x - 1} NFTs in {generation_time} (h:m:s)

Generation Time:
{generation_time.split(':')[0]} Hours,
{generation_time.split(':')[1]} Minutes,
{generation_time.split(':')[2]} Seconds
Batch Data:

{batch_data}

This message was sent from an instance of the Blend_My_NFTs Blender add-on.
"""

        context = ssl.create_default_context()
        with smtplib.SMTP_SSL(smtp_server, port, context=context) as server:
            server.login(sender_email, password)
            server.sendmail(sender_email, receiver_email, message)

    # Automatic Shutdown:
    # If user selects automatic shutdown but did not specify time after Batch completion
    def shutdown(time):
        if platform.system() == "Windows":
            os.system(f"shutdown /s /t {time}")
        if platform.system() == "Darwin":
            # macOS shutdown takes a delay in minutes rather than the Windows-style /t seconds flag:
            os.system(f"shutdown -h +{int(time) // 60}")

    if input.enable_auto_shutdown and not input.specify_time_bool:
        shutdown(0)

    # If user selects automatic shutdown and a specified time after Batch completion
    if input.enable_auto_shutdown and input.specify_time_bool:
        hours_in_seconds = int(input.hours) * 60 * 60
        minutes_in_seconds = int(input.minutes) * 60
        total_sleep_time = hours_in_seconds + minutes_in_seconds

        # time.sleep(total_sleep_time)

        shutdown(total_sleep_time)
@ -1,26 +0,0 @@
import bpy

from . import DNA_Generator


def get_combinations():
    """
    Returns "combinations", the number of all possible NFT DNA for a given Blender scene formatted to BMNFTs conventions
    combinations.
    """

    hierarchy = DNA_Generator.get_hierarchy()
    hierarchyByNum = []

    for i in hierarchy:
        # Ignore Collections with nothing in them
        if len(hierarchy[i]) != 0:
            hierarchyByNum.append(len(hierarchy[i]))
        else:
            print(f"The following collection has been identified as empty: {i}")

    combinations = 1
    for i in hierarchyByNum:
        combinations = combinations*i

    return combinations
@ -1,11 +1,12 @@
#adding CLI arguments
#Used this as a basis:
#https://developer.blender.org/diffusion/B/browse/master/release/scripts/templates_py/background_job.py
# adding CLI arguments
# Used this as a basis:
# https://developer.blender.org/diffusion/B/browse/master/release/scripts/templates_py/background_job.py

import sys
import argparse

def getPythonArgs():


def get_python_args():

    argv = sys.argv
@ -15,7 +16,7 @@ def getPythonArgs():
    argv = argv[argv.index("--") + 1:]  # get all args after "--"

    usage_text = (
        "Run Blend_My_NFTs headlessly from the command line\n"
        "Run Blend_My_NFTs headlessly from the command line\n"
        "usage:\n"
        "blender -background --python <Path to BMNFTs __init__.py> -- --config-file <path to config file>"
    )
@ -64,4 +65,4 @@ def getPythonArgs():
        help="Overwrite the logic file path in the config file"
    )

    return (parser.parse_args(argv), parser)
    return parser.parse_args(argv), parser
@ -0,0 +1,553 @@
import bpy
import os
import sys
import json
import copy
import logging
import tempfile
import platform
import traceback
from time import sleep
from itertools import cycle
from threading import Thread
from shutil import get_terminal_size
from collections import Counter, defaultdict

log = logging.getLogger(__name__)


# ======== CONSTANTS ======== #

# Constants are used for storing or updating constant values that may need to be changed depending on system
# requirements and different use-cases.

removeList = [".gitignore", ".DS_Store", "desktop.ini", ".ini"]


def remove_file_by_extension(dirlist):
    """
    Checks if a given directory list contains any of the files or file extensions listed above; if so, removes them
    from the list and returns a clean dir list. These files interfere with BMNFTs operations and should be removed
    whenever dealing with directories.
    """

    if isinstance(dirlist, str):
        dirlist = [dirlist]  # converts a single path passed as a string into a list

    return_dirs = []
    for directory in dirlist:
        if not str(os.path.split(directory)[1]) in removeList:
            return_dirs.append(directory)

    return return_dirs
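
# Editor's sketch (not part of the add-on), hypothetical usage: system junk
# files are dropped, real entries pass through.
#
#     remove_file_by_extension(["Batch1.json", ".DS_Store", "Batch2.json"])
#     # -> ["Batch1.json", "Batch2.json"]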


# TODO: fix colours in console logs and find a way to include coloured text in .txt file.

class TextColors:
    """
    The colour of console messages.
    """

    OK = '\033[92m'  # GREEN
    WARNING = '\033[93m'  # YELLOW
    ERROR = '\033[91m'  # RED
    RESET = '\033[0m'  # RESET COLOR


def save_result(result):
    """
    Saves a json result to a log.json file on the user's Desktop.
    """
    file_name = "log.json"
    if platform.system() == "Linux" or platform.system() == "Darwin":
        path = os.path.join(os.path.join(os.path.expanduser('~')), 'Desktop', file_name)

    if platform.system() == "Windows":
        path = os.path.join(os.environ["HOMEPATH"], "Desktop", file_name)

    data = json.dumps(result, indent=1, ensure_ascii=True)
    with open(path, 'w') as outfile:
        outfile.write(data + '\n')


# ======== GET HIERARCHY ======== #

# This section retrieves the Scene hierarchy from the current Blender file.


def get_hierarchy():
    """
    Returns the hierarchy of a given Blender scene.
    """

    coll = bpy.context.scene.collection

    script_ignore_collection = bpy.data.collections["Script_Ignore"]

    list_all_coll_in_scene = []
    list_all_collections = []

    def traverse_tree(t):
        yield t
        for child in t.children:
            yield from traverse_tree(child)

    for c in traverse_tree(coll):
        list_all_coll_in_scene.append(c)

    for i in list_all_coll_in_scene:
        list_all_collections.append(i.name)

    list_all_collections.remove(script_ignore_collection.name)

    if "Scene Collection" in list_all_collections:
        list_all_collections.remove("Scene Collection")

    if "Master Collection" in list_all_collections:
        list_all_collections.remove("Master Collection")

    def all_script_ignore(script_ignore_coll):
        # Removes all collections and sub-collections in the Script_Ignore collection from list_all_collections.

        for collection in list(script_ignore_coll.children):
            list_all_collections.remove(collection.name)
            list_coll = list(collection.children)
            if len(list_coll) > 0:
                all_script_ignore(collection)

    all_script_ignore(script_ignore_collection)
    list_all_collections.sort()

    exclude = ["_"]  # Excluding characters that identify a Variant
    attribute_collections = copy.deepcopy(list_all_collections)

    def filter_num():
        """
        This function removes items from 'attribute_collections' if they include values from the 'exclude' variable.
        It removes child collections from the parent collections in the "list_all_collections" list.
        """
        for x in attribute_collections:
            if any(i in x for i in exclude):
                attribute_collections.remove(x)

    for i in range(len(list_all_collections)):
        filter_num()

    attribute_variants = [x for x in list_all_collections if x not in attribute_collections]
    attribute_collections1 = copy.deepcopy(attribute_collections)

    def attribute_data(att_vars):
        """
        Creates a dictionary of each attribute.
        """
        all_att_data_list = {}
        for i in att_vars:
            # Check if name follows naming conventions:
            if int(i.count("_")) > 2 and int(i.split("_")[1]) > 0:
                log.error(
                    f"\n{traceback.format_exc()}"
                    f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
                    f"There is a naming issue with the following Attribute/Variant: '{i}'\n"
                    f"Review the naming convention of Attribute and Variant collections here:\n{TextColors.RESET}"
                    f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
                )
                raise Exception()

            try:
                number = i.split("_")[1]
                name = i.split("_")[0]
                rarity = i.split("_")[2]
            except IndexError:
                log.error(
                    f"\n{traceback.format_exc()}"
                    f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
                    f"There is a naming issue with the following Attribute/Variant: '{i}'\n"
                    f"Review the naming convention of Attribute and Variant collections here:\n{TextColors.RESET}"
                    f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
                )
                raise Exception()

            all_att_data_list[i] = {"name": name, "number": number, "rarity": rarity}
        return all_att_data_list

    variant_meta_data = attribute_data(attribute_variants)

    hierarchy = {}
    for i in attribute_collections1:
        col_par_long = list(bpy.data.collections[str(i)].children)
        col_par_short = {}
        for x in col_par_long:
            col_par_short[x.name] = None
        hierarchy[i] = col_par_short

    for a in hierarchy:
        for b in hierarchy[a]:
            for x in variant_meta_data:
                if str(x) == str(b):
                    (hierarchy[a])[b] = variant_meta_data[x]

    return hierarchy
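
# Editor's sketch (not part of the add-on): for a hypothetical scene with one
# attribute collection "Background" holding variant collections named
# <name>_<number>_<rarity>, get_hierarchy() returns:
#
#     {
#         "Background": {
#             "Blue_1_70": {"name": "Blue", "number": "1", "rarity": "70"},
#             "Red_2_30": {"name": "Red", "number": "2", "rarity": "30"},
#         }
#     }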


# ======== GET COMBINATIONS ======== #

# This section is used to get the number of combinations for checks and the UI display

def get_combinations():
    """
    Returns "combinations", the number of all possible NFT DNA combinations for a given Blender scene formatted to
    BMNFTs conventions.
    """

    hierarchy = get_hierarchy()
    hierarchy_by_num = []

    for i in hierarchy:
        # Ignore Collections with nothing in them
        if len(hierarchy[i]) != 0:
            hierarchy_by_num.append(len(hierarchy[i]))
        else:
            log.warning(f"\nThe following collection has been identified as empty: {i}")

    combinations = 1
    for i in hierarchy_by_num:
        combinations = combinations * i

    return combinations
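
# Editor's sketch (not part of the add-on): the combination count is simply the
# product of variant counts per attribute. A hypothetical scene with 5
# backgrounds, 4 bodies, and 2 hats yields 5 * 4 * 2 = 40 unique DNA.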
|
||||
|
||||
|
||||
# ======== CHECKS ======== #
|
||||
|
||||
# This section is used to check the NFTRecord.json for duplicate NFT DNA and returns any found in the console.
|
||||
# It also checks the percentage each variant is chosen in the NFTRecord, then compares it with its rarity percentage
|
||||
# set in the .blend file.
|
||||
|
||||
# This section is provided for transparency. The accuracy of the rarity values you set in your .blend file as outlined
|
||||
# in the README.md file are dependent on the maxNFTs, and the maximum number of combinations of your NFT collection.
|
||||
|
||||
def check_scene(): # Not complete
|
||||
"""
|
||||
Checks if Blender file Scene follows the Blend_My_NFTs conventions. If not, raises error with all instances of
|
||||
violations.
|
||||
"""
|
||||
|
||||
script_ignore_exists = None # True if Script_Ignore collection exists in Blender scene
|
||||
attribute_naming_conventions = None # True if all attributes in Blender scene follow BMNFTs naming conventions
|
||||
variant_naming_conventions = None # True if all variants in Blender scene follow BMNFTs naming conventions
|
||||
object_placing_conventions = None # True if all objects are within either Script_Ignore or a variant collection
|
||||
|
||||
# script_ignore_exists:
|
||||
try:
|
||||
scriptIgnoreCollection = bpy.data.collections["Script_Ignore"]
|
||||
script_ignore_exists = True
|
||||
except KeyError:
|
||||
log.error(
|
||||
f"\n{traceback.format_exc()}"
|
||||
f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
|
||||
f"Add a Script_Ignore collection to your Blender scene and ensure the name is exactly 'Script_Ignore'. "
|
||||
f"For more information, see:"
|
||||
f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n{TextColors.RESET}"
|
||||
)
|
||||
raise
|
||||
|
||||
hierarchy = get_hierarchy()
|
||||
collections = bpy.context.scene.collection
|
||||
|
||||
# attribute_naming_conventions
|
||||
|
||||
|
||||
def check_rarity(hierarchy, dna_list_formatted, save_path):
|
||||
"""Checks rarity percentage of each Variant, then sends it to RarityData.json in NFT_Data folder."""
|
||||
|
||||
dna_list = [list(i.keys())[0] for i in dna_list_formatted]
|
||||
num_nfts_generated = len(dna_list)
|
||||
num_dict = defaultdict(list)
|
||||
hierarchy.keys()
|
||||
|
||||
for i in dna_list:
|
||||
dna_split_list = i.split("-")
|
||||
|
||||
for j, k in zip(dna_split_list, hierarchy.keys()):
|
||||
num_dict[k].append(j)
|
||||
|
||||
num_dict = dict(num_dict)
|
||||
|
||||
for i in num_dict:
|
||||
count = dict(Counter(num_dict[i]))
|
||||
num_dict[i] = count
|
||||
|
||||
full_num_name = {}
|
||||
|
||||
for i in hierarchy:
|
||||
full_num_name[i] = {}
|
||||
for j in hierarchy[i]:
|
||||
variant_num = hierarchy[i][j]["number"]
|
||||
|
||||
full_num_name[i][variant_num] = j
|
||||
|
||||
complete_data = {}
|
||||
|
||||
for i, j in zip(full_num_name, num_dict):
|
||||
x = {}
|
||||
for k in full_num_name[i]:
|
||||
for l in num_dict[j]:
|
||||
if l == k:
|
||||
name = full_num_name[i][k]
|
||||
num = num_dict[j][l]
|
||||
x[name] = [(str(round(((num / num_nfts_generated) * 100), 2)) + "%"), str(num)]
|
||||
|
||||
complete_data[i] = x
|
||||
|
||||
# Saving Rarity data to console and log:
|
||||
x = f"\nPercentages for each Variant per Attribute:"
|
||||
for i in complete_data:
|
||||
x += f"\n\n{i}:"
|
||||
if complete_data[i]:
|
||||
for j in complete_data[i]:
|
||||
x += f"\n - {j}: {complete_data[i][j][0]} occurs {complete_data[i][j][1]} times."
|
||||
else:
|
||||
x += f"\n - Variants not selected."
|
||||
|
||||
log.info(x)
|
||||
|
||||
json_meta_data = json.dumps(complete_data, indent=1, ensure_ascii=True)
|
||||
|
||||
with open(os.path.join(save_path, "RarityData.json"), 'w') as outfile:
|
||||
outfile.write(json_meta_data + '\n')
|
||||
path = os.path.join(save_path, "RarityData.json")
|
||||
|
||||
log.info(
|
||||
f"\nRarity data has been saved to:\n{path}"
|
||||
)
|
||||
|
||||
|
||||
def check_duplicates(dna_list_formatted):
    """Checks if there are duplicates in dna_list before NFTRecord.json is sent to the JSON file."""
    dna_list = []
    for i in dna_list_formatted:
        dna_list.append(list(i.keys())[0])

    duplicates = 0
    seen = set()

    for x in dna_list:
        if x in seen:
            duplicates += 1
        seen.add(x)

    if duplicates > 0:
        log.warning(
            f"\n{TextColors.WARNING}Blend_My_NFTs Warning:\n"
            f"{duplicates} duplicate NFT DNA entries were detected. This should not be possible. "
            f"For more information, see:"
            f"\nhttps://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure"
            f"\n{TextColors.RESET}"
        )

    log.info(f"\n\nDuplicate NFT DNA found: {duplicates}")

def check_failed_batches(batch_json_save_path):
    fail_state = False
    failed_batch = None
    failed_dna = None
    failed_dna_index = None

    if os.path.isdir(batch_json_save_path):
        batch_folders = remove_file_by_extension(os.listdir(batch_json_save_path))

        for i in batch_folders:
            batch = json.load(open(os.path.join(batch_json_save_path, i)))
            nfts_in_batch = batch["nfts_in_batch"]
            if "Generation Save" in batch:
                dna_generated = batch["Generation Save"][-1]["DNA Generated"]
                if dna_generated is not None and dna_generated < nfts_in_batch:
                    fail_state = True
                    failed_batch = int(i.removeprefix("Batch").removesuffix(".json"))
                    failed_dna = dna_generated

    return fail_state, failed_batch, failed_dna, failed_dna_index

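For context, check_failed_batches reads only two fields from each Batch{n}.json file. A minimal sketch of the shape it assumes, with illustrative values and all other fields omitted — this batch would be flagged as failed, since only 7 of its 10 DNA were generated:

{
 "nfts_in_batch": 10,
 "Generation Save": [
  {"DNA Generated": 7}
 ]
}
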
# Raise Errors:
def raise_error_num_batches(max_nfts, nfts_per_batch):
    """Computes the number of Batches (max_nfts / nfts_per_batch); raises an error if nfts_per_batch is zero."""

    try:
        num_batches = max_nfts / nfts_per_batch
        return num_batches
    except ZeroDivisionError:
        log.error(
            f"\n{traceback.format_exc()}"
            f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
            f"The number of NFTs per Batch must be greater than ZERO. "
            f"Please review your Blender scene and ensure it follows "
            f"the naming conventions and scene structure. For more information, "
            f"see:\n{TextColors.RESET}"
            f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure"
            f"\n{TextColors.RESET}"
        )
        raise ZeroDivisionError()

def raise_error_zero_combinations():
    """Raises an error if the number of possible combinations is zero."""
    if get_combinations() == 0:
        log.error(
            f"\n{traceback.format_exc()}"
            f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
            f"The number of all possible combinations is ZERO. Please review your Blender scene and ensure it "
            f"follows the naming conventions and scene structure. For more information, see:\n{TextColors.RESET}"
            f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure"
            f"\n{TextColors.RESET}"
        )

        raise ValueError()

def raise_error_num_batches_greater_then(num_batches):
    if num_batches < 1:
        log.error(
            f"\n{traceback.format_exc()}"
            f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
            f"The number of Batches is less than 1. Please review your Blender scene and ensure it follows "
            f"the naming conventions and scene structure. For more information, "
            f"see:\n{TextColors.RESET}"
            f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure"
            f"\n{TextColors.RESET}"
        )
        raise ValueError()

# Raise Warnings:
def raise_warning_max_nfts(nfts_per_batch, collection_size):
    """
    Raises an error if nfts_per_batch is greater than collection_size.
    """

    if nfts_per_batch > collection_size:
        log.error(
            f"\n{TextColors.WARNING}Blend_My_NFTs Warning:\n"
            f"The number of NFTs Per Batch you set is greater than the NFT Collection Size you set."
            f"\n{TextColors.RESET}"
        )

        raise ValueError()

def raise_warning_collection_size(dna_list, collection_size):
    """
    Prints a warning if BMNFTs cannot generate the requested number of NFTs from a given collection_size.
    """

    if len(dna_list) < collection_size:
        log.warning(
            f"\n{TextColors.WARNING}\nWARNING:\n"
            f"Blend_My_NFTs cannot generate {collection_size} NFTs."
            f" Only {len(dna_list)} NFT DNA were generated."

            f"\nThis might be for a number of reasons:"
            f"\n  a) Rarity is preventing combinations from being generated (See "
            f"https://github.com/torrinworx/Blend_My_NFTs#notes-on-rarity-and-weighted-variants).\n"
            f"\n  b) Logic is preventing combinations from being generated (See "
            f"https://github.com/torrinworx/Blend_My_NFTs#logic).\n"
            f"\n  c) The number of possible combinations of your NFT collection is too low. Add more Variants or "
            f"Attributes to increase the recommended collection size.\n"
            f"\n{TextColors.RESET}"
        )

# ======== LOADING ANIMATION ======== #

# This section is used for the loading animation shown in the system console.
class Loader:
    def __init__(self, desc="Loading...", end="Done!", timeout=0.1):
        """
        A loader-like context manager

        Args:
            desc (str, optional): The loader's description. Defaults to "Loading...".
            end (str, optional): Final print. Defaults to "Done!".
            timeout (float, optional): Sleep time between prints. Defaults to 0.1.
        """
        self.desc = desc
        self.end = end
        self.timeout = timeout

        self._thread = Thread(target=self._animate, daemon=True)
        self.steps = [
            " [==     ]",
            " [ ==    ]",
            " [  ==   ]",
            " [   ==  ]",
            " [    == ]",
            " [     ==]",
            " [    == ]",
            " [   ==  ]",
            " [  ==   ]",
            " [ ==    ]",
        ]
        self.done = False

    def start(self):
        self._thread.start()
        return self

    def _animate(self):
        for c in cycle(self.steps):
            if self.done:
                break
            print(f"\r{self.desc} {c}", flush=True, end="")
            sleep(self.timeout)

    def __enter__(self):
        self.start()

    def stop(self):
        self.done = True
        cols = get_terminal_size((80, 20)).columns
        print("\r" + " " * cols, end="", flush=True)
        print(f"\r{self.end}", flush=True)

    def __exit__(self, exc_type, exc_value, tb):
        # Exceptions raised inside the with-block, if any, arrive via exc_type/exc_value/tb.
        self.stop()

def activate_logging():
    """
    Used as an intermediary, activated at runtime by the following operators: CreateData, ExportNFTs,
    ResumeFailedBatch, RefactorBatches, and ExportSettings. Must be independent of the 'input' class to be safe;
    gets its variables directly from bpy.
    """

    log_path = bpy.context.scene.input_tool.log_path
    if log_path:
        file_handler = logging.FileHandler(os.path.join(log_path, 'BMNFTs_Log.txt'), 'a')
    else:
        file_handler = logging.FileHandler(os.path.join(tempfile.gettempdir(), 'BMNFTs_Log.txt'), 'a')

    formatter = logging.Formatter(
        '[%(asctime)s] [%(levelname)s] [%(filename)s > %(funcName)s() > Line:%(lineno)d]\n%(message)s\n'
    )
    file_handler.setFormatter(formatter)

    log = logging.getLogger()
    for handler in log.handlers[:]:
        if isinstance(handler, logging.FileHandler):
            log.removeHandler(handler)
        if isinstance(handler, logging.StreamHandler):
            log.removeHandler(handler)
    log.addHandler(file_handler)

    # Record log to console:
    console_handler = logging.StreamHandler(sys.stdout)
    log.addHandler(console_handler)

    if bpy.context.scene.input_tool.enable_debug:
        logging.getLogger().setLevel(logging.DEBUG)
    else:
        logging.getLogger().setLevel(logging.INFO)
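With the formatter above, each record appended to BMNFTs_Log.txt takes roughly the following shape (timestamp, file name, function, and line number are all illustrative):

[2022-08-01 12:00:00,000] [INFO] [helpers.py > check_rarity() > Line:42]
Rarity data has been saved to:
<save_path>/RarityData.json
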
@ -0,0 +1,95 @@
import logging

import bpy
import json

from main import dna_generator, exporter

log = logging.getLogger(__name__)

# TODO: Migrate this code to dna_generator.py (send_to_record) and exporter.py (render_and_save_nfts) to simplify
# the render process into one file.

def send_to_record(input, reverse_order=False):
    if input.enable_logic:
        if input.enable_logic_json and input.logic_file:
            input.logic_file = json.load(open(input.logic_file))

        if input.enable_logic_json and not input.logic_file:
            log.error(
                f"No Logic.json file path set. Please set the file path to your Logic.json file."
            )
            raise ValueError()

        if not input.enable_logic_json:
            scn = bpy.context.scene
            if reverse_order:
                input.logic_file = {}
                num = 1
                for i in range(scn.logic_fields_index, -1, -1):
                    item = scn.logic_fields[i]

                    item_list1 = item.item_list1
                    rule_type = item.rule_type
                    item_list2 = item.item_list2
                    input.logic_file[f"Rule-{num}"] = {
                        "IF": item_list1.split(','),
                        rule_type: item_list2.split(',')
                    }
                    num += 1
            else:
                input.logic_file = {}
                num = 1
                for item in scn.logic_fields:
                    item_list1 = item.item_list1
                    rule_type = item.rule_type
                    item_list2 = item.item_list2
                    input.logic_file[f"Rule-{num}"] = {
                        "IF": item_list1.split(','),
                        rule_type: item_list2.split(',')
                    }
                    num += 1

    dna_generator.send_to_record(
        input.collection_size,
        input.nfts_per_batch,
        input.save_path,
        input.enable_rarity,
        input.enable_logic,
        input.logic_file,
        input.enable_materials,
        input.materials_file,
        input.blend_my_nfts_output,
        input.batch_json_save_path,
        input.enable_debug,
        input.log_path
    )

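Each UI rule row therefore becomes one "Rule-{num}" entry in input.logic_file, holding an "IF" list and either a "THEN" or a "NOT" list depending on the row's rule_type. A minimal sketch of the resulting dict, with hypothetical variant names:

{
 "Rule-1": {
  "IF": ["Red_1_25"],
  "THEN": ["Cap_1_50", "Beanie_2_50"]
 }
}
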
def render_and_save_nfts(input, reverse_order=False):
    if input.enable_custom_fields:
        scn = bpy.context.scene
        if reverse_order:
            for i in range(scn.custom_metadata_fields_index, -1, -1):
                item = scn.custom_metadata_fields[i]
                if item.field_name in list(input.custom_fields.keys()):
                    log.error(
                        f"A duplicate of '{item.field_name}' was found. Ensure all Custom Metadata field "
                        f"Names are unique."
                    )
                    raise ValueError()
                else:
                    input.custom_fields[item.field_name] = item.field_value
        else:
            for item in scn.custom_metadata_fields:
                if item.field_name in list(input.custom_fields.keys()):
                    log.error(
                        f"A duplicate of '{item.field_name}' was found. Ensure all Custom Metadata field "
                        f"Names are unique."
                    )
                    raise ValueError()
                else:
                    input.custom_fields[item.field_name] = item.field_value

    exporter.render_and_save_nfts(input)
@ -1,69 +0,0 @@
from itertools import cycle
from shutil import get_terminal_size
from threading import Thread
from time import sleep


class Loader:
    def __init__(self, desc="Loading...", end="Done!", timeout=0.1):
        """
        A loader-like context manager

        Args:
            desc (str, optional): The loader's description. Defaults to "Loading...".
            end (str, optional): Final print. Defaults to "Done!".
            timeout (float, optional): Sleep time between prints. Defaults to 0.1.
        """
        self.desc = desc
        self.end = end
        self.timeout = timeout

        self._thread = Thread(target=self._animate, daemon=True)
        self.steps = [
            " [==     ]",
            " [ ==    ]",
            " [  ==   ]",
            " [   ==  ]",
            " [    == ]",
            " [     ==]",
            " [    == ]",
            " [   ==  ]",
            " [  ==   ]",
            " [ ==    ]",
        ]
        self.done = False

    def start(self):
        self._thread.start()
        return self

    def _animate(self):
        for c in cycle(self.steps):
            if self.done:
                break
            print(f"\r{self.desc} {c}", flush=True, end="")
            sleep(self.timeout)

    def __enter__(self):
        self.start()

    def stop(self):
        self.done = True
        cols = get_terminal_size((80, 20)).columns
        print("\r" + " " * cols, end="", flush=True)
        print(f"\r{self.end}", flush=True)

    def __exit__(self, exc_type, exc_value, tb):
        # handle exceptions with those variables ^
        self.stop()


if __name__ == "__main__":
    with Loader("Loading with context manager..."):
        for i in range(10):
            sleep(0.25)

    loader = Loader("Loading with object...", "That was fast!", 0.05).start()
    for i in range(10):
        sleep(0.25)
    loader.stop()
@ -1,19 +1,23 @@
# Purpose:
# The purpose of this file is to add logic and rules to the DNA that are sent to the NFTRecord.json file in DNA_Generator.py
# The purpose of this file is to add logic and rules to the DNA that are sent to the NFTRecord.json file in
# dna_generator.py

import bpy
import random
import logging
import traceback
import collections

from .Constants import bcolors, removeList, remove_file_by_extension, save_result
from .helpers import TextColors

log = logging.getLogger(__name__)


def reconstructDNA(deconstructedDNA):
    reconstructed_DNA = ""
    for a in deconstructedDNA:
def reconstruct_dna(deconstructed_dna):
    reconstructed_dna = ""
    for a in deconstructed_dna:
        num = "-" + str(a)
        reconstructed_DNA += num
    return ''.join(reconstructed_DNA.split('-', 1))
        reconstructed_dna += num
    return ''.join(reconstructed_dna.split('-', 1))


def get_var_info(variant, hierarchy):
@ -33,11 +37,11 @@ def get_var_info(variant, hierarchy):
    return [name, order_number, rarity_number, attribute, attribute_index]  # list of Var info sent back


def apply_rules_to_dna(hierarchy, deconstructed_DNA, if_dict, result_dict, result_dict_type, enableRarity):
    # Check if Variants in if_dict are in deconstructed_DNA, if so return if_list_selected = True:
def apply_rules_to_dna(hierarchy, deconstructed_dna, if_dict, result_dict, result_dict_type, enable_rarity):
    # Check if Variants in if_dict are in deconstructed_dna, if so return if_list_selected = True:
    if_list_selected = False
    for a in deconstructed_DNA:
        attribute_index = deconstructed_DNA.index(a)
    for a in deconstructed_dna:
        attribute_index = deconstructed_dna.index(a)
        attribute = list(hierarchy.keys())[attribute_index]

        for b in hierarchy[attribute]:
@ -49,23 +53,23 @@ def apply_rules_to_dna(hierarchy, deconstructed_DNA, if_dict, result_dict, resul
                if_list_selected = True

    # Apply changes in accordance to Variants in 'result_dict' and 'if_list_selected' bool above:
    for a in deconstructed_DNA:
        attribute_index = deconstructed_DNA.index(a)
    for a in deconstructed_dna:
        attribute_index = deconstructed_dna.index(a)
        attribute = list(hierarchy.keys())[attribute_index]

        if attribute in result_dict:  # Check if Attribute from DNA is in 'result_dict'

            # If 'a' is a full Attribute and Variants in if_dict not selected, set 'a' to empty (0):
            if list(result_dict[attribute].keys()) == list(hierarchy[attribute].keys()) and not if_list_selected:
                deconstructed_DNA[attribute_index] = "0"
                deconstructed_dna[attribute_index] = "0"

            # If 'a' is a full Attribute and result_dict_type = "NOT", set 'a' to empty (0):
            if list(result_dict[attribute].keys()) == list(
                    hierarchy[attribute].keys()) and if_list_selected and result_dict_type == "NOT":
                deconstructed_DNA[attribute_index] = "0"
                deconstructed_dna[attribute_index] = "0"

    # If Variants in if_dict are selected, set each attribute in 'result_dict' to a random or rarity selected Variant from
    # 'result_dict[attribute]' variant_list:
    # If Variants in if_dict are selected, set each attribute in 'result_dict' to a random or rarity selected Variant
    # from 'result_dict[attribute]' variant_list:
    if if_list_selected:

        # Invert 'items_returned' if 'NOT' rule is selected:
@ -91,60 +95,64 @@ def apply_rules_to_dna(hierarchy, deconstructed_DNA, if_dict, result_dict, resul

        if attribute in result_dict:  # Check if Attribute from DNA is in 'then_dict'

            number_List_Of_i = []
            rarity_List_Of_i = []
            ifZeroBool = None
            variantNum = None
            number_list_of_i = []
            rarity_list_of_i = []
            if_zero_bool = None
            variant_num = None

            for b in variant_list:
                number = b.split("_")[1]
                rarity = b.split("_")[2]

                number_List_Of_i.append(int(number))
                rarity_List_Of_i.append(float(rarity))
                number_list_of_i.append(int(number))
                rarity_list_of_i.append(float(rarity))

            for b in rarity_List_Of_i:
            for b in rarity_list_of_i:
                if b == 0:
                    ifZeroBool = True
                    if_zero_bool = True
                elif b != 0:
                    ifZeroBool = False
                    if_zero_bool = False

            if enableRarity:
            if enable_rarity:
                try:
                    if ifZeroBool:
                        variantNum = random.choices(number_List_Of_i, k=1)
                    elif not ifZeroBool:
                        variantNum = random.choices(number_List_Of_i, weights=rarity_List_Of_i, k=1)
                    if if_zero_bool:
                        variant_num = random.choices(number_list_of_i, k=1)
                    elif not if_zero_bool:
                        variant_num = random.choices(number_list_of_i, weights=rarity_list_of_i, k=1)
                except IndexError:
                    raise IndexError(
                        f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
                        f"An issue was found within the Attribute collection '{a}'. For more information on Blend_My_NFTs compatible scenes, "
                        f"see:\n{bcolors.RESET}"
                        f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
                    log.error(
                        f"\n{traceback.format_exc()}"
                        f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
                        f"An issue was found within the Attribute collection '{a}'. For more information on "
                        f"Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}"
                        f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
                    )
                    raise IndexError()
            else:
                try:
                    variantNum = random.choices(number_List_Of_i, k=1)
                    variant_num = random.choices(number_list_of_i, k=1)
                except IndexError:
                    raise IndexError(
                        f"\n{bcolors.ERROR}Blend_My_NFTs Error:\n"
                        f"An issue was found within the Attribute collection '{a}'. For more information on Blend_My_NFTs compatible scenes, "
                        f"see:\n{bcolors.RESET}"
                        f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
                    log.error(
                        f"\n{traceback.format_exc()}"
                        f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
                        f"An issue was found within the Attribute collection '{a}'. For more information on "
                        f"Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}"
                        f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
                    )
            deconstructed_DNA[int(attribute_index)] = str(variantNum[0])
                    raise IndexError()
            deconstructed_dna[int(attribute_index)] = str(variant_num[0])

    return deconstructed_DNA
    return deconstructed_dna


def get_rule_break_type(hierarchy, deconstructed_DNA, if_dict, result_dict, result_dict_type):
    # Check if Variants in 'if_dict' found in deconstructed_DNA:
    if_bool = False  # True if Variant in 'deconstructed_DNA' found in 'if_dict'
def get_rule_break_type(hierarchy, deconstructed_dna, if_dict, result_dict, result_dict_type):
    # Check if Variants in 'if_dict' found in deconstructed_dna:
    if_bool = False  # True if Variant in 'deconstructed_dna' found in 'if_dict'
    for a in if_dict:  # Attribute in 'if_dict'
        for b in if_dict[a]:  # Variant in if_dict[Attribute]
            var_order_num = str(if_dict[a][b][1])  # Order number of 'b' (Variant)
            dna_order_num = str(
                deconstructed_DNA[if_dict[a][b][4]])  # Order Number of 'b's attribute in deconstructed_DNA
                deconstructed_dna[if_dict[a][b][4]])  # Order Number of 'b's attribute in deconstructed_dna

            if var_order_num == dna_order_num:  # If DNA selected Variants found inside IF list variants:
                if_bool = True
@ -153,14 +161,14 @@ def get_rule_break_type(hierarchy, deconstructed_DNA, if_dict, result_dict, resu
                continue
            break

    # Check if Variants in 'result_dict' found in deconstructed_DNA:
    # Check if Variants in 'result_dict' found in deconstructed_dna:
    full_att_bool = False
    result_bool = False  # True if Variant in 'deconstructed_DNA' found in 'result_dict'
    result_bool = False  # True if Variant in 'deconstructed_dna' found in 'result_dict'
    for a in result_dict:  # Attribute in 'result_dict'
        for b in result_dict[a]:  # Variant in if_dict[Attribute]
            var_order_num = str(result_dict[a][b][1])  # Order number of 'b' (Variant)
            dna_order_num = str(
                deconstructed_DNA[result_dict[a][b][4]])  # Order Number of 'b's attribute in deconstructed_DNA
                deconstructed_dna[result_dict[a][b][4]])  # Order Number of 'b's attribute in deconstructed_dna
            if var_order_num == dna_order_num:  # If DNA selected Variants found inside THEN list variants:
                if list(result_dict[a].keys()) == list(hierarchy[a].keys()):
                    full_att_bool = True
@ -173,20 +181,20 @@ def get_rule_break_type(hierarchy, deconstructed_DNA, if_dict, result_dict, resu
    # Rule Bool return summary:
    violates_rule = False

    # If Variants in 'if_dict' found in deconstructed_DNA and Variants in 'result_dict' not found in deconstructed_DNA:
    # If Variants in 'if_dict' found in deconstructed_dna and Variants in 'result_dict' not found in deconstructed_dna:
    if if_bool and not result_bool:
        violates_rule = True

    elif if_bool and result_bool and result_dict_type == "NOT":
        violates_rule = True

    # If Variants in 'if_dict' not found in deconstructed_DNA, and 'result_dict' variants are found in deconstructed_DNA,
    # and they are a part of a full Attribute in 'then_dict'
    # If Variants in 'if_dict' not found in deconstructed_dna, and 'result_dict' variants are found in
    # deconstructed_dna, and they are a part of a full Attribute in 'then_dict'
    elif not if_bool and result_bool and full_att_bool:
        violates_rule = True

    # If Variants in 'if_dict' not found in deconstructed_DNA, but Variants in 'then_dict' are found in deconstructed_DNA,
    # and don't make up a full Attribute:
    # If Variants in 'if_dict' not found in deconstructed_dna, but Variants in 'then_dict' are found in
    # deconstructed_dna, and don't make up a full Attribute:
    # elif not if_bool and result_bool and not full_att_bool:
    #     violates_rule = False
@ -194,7 +202,9 @@ def get_rule_break_type(hierarchy, deconstructed_DNA, if_dict, result_dict, resu


def create_dicts(hierarchy, rule_list_items, result_dict_type):
    # Example of output structure:
    """
    Example of output structure:

    structure = {
        "attribute1": {
            "variant1": [
@ -229,6 +239,7 @@ def create_dicts(hierarchy, rule_list_items, result_dict_type):
            ]
        }
    }
    """

    items_returned = collections.defaultdict(dict)
    for a in rule_list_items:
@ -249,41 +260,50 @@ def create_dicts(hierarchy, rule_list_items, result_dict_type):
    return dict(items_returned)


def logicafyDNAsingle(hierarchy, singleDNA, logicFile, enableRarity, enableMaterials):
    deconstructed_DNA = singleDNA.split("-")
    didReconstruct = True
    originalDNA = str(singleDNA)
def logicafy_dna_single(hierarchy, single_dna, logic_file, enable_rarity):
    deconstructed_dna = single_dna.split("-")
    did_reconstruct = True
    original_dna = str(single_dna)

    while didReconstruct:
        didReconstruct = False
        for rule in logicFile:
    while did_reconstruct:
        did_reconstruct = False
        for rule in logic_file:
            # Items from 'IF' key for a given rule
            if_dict = create_dicts(hierarchy, logicFile[rule]["IF"], "IF")
            if_dict = create_dicts(hierarchy, logic_file[rule]["IF"], "IF")

            result_dict_type = ""
            if "THEN" in logicFile[rule]:
            if "THEN" in logic_file[rule]:
                result_dict_type = "THEN"

            if "NOT" in logicFile[rule]:
            if "NOT" in logic_file[rule]:
                result_dict_type = "NOT"

            result_dict = create_dicts(hierarchy, logicFile[rule][result_dict_type], result_dict_type)
            result_dict = create_dicts(hierarchy, logic_file[rule][result_dict_type], result_dict_type)

            # Change 'then_bool' to 'result_bool'
            violates_rule, if_bool, then_bool, full_att_bool = get_rule_break_type(hierarchy, deconstructed_DNA,
                                                                                   if_dict, result_dict,
                                                                                   result_dict_type)
            violates_rule, if_bool, then_bool, full_att_bool = get_rule_break_type(
                hierarchy,
                deconstructed_dna,
                if_dict,
                result_dict,
                result_dict_type,
            )
            if violates_rule:
                # print(f"======={deconstructed_DNA} VIOLATES RULE======")
                log.debug(f"======={deconstructed_dna} VIOLATES RULE======")

                deconstructed_DNA = apply_rules_to_dna(
                    hierarchy, deconstructed_DNA, if_dict, result_dict, result_dict_type, enableRarity
                deconstructed_dna = apply_rules_to_dna(
                    hierarchy,
                    deconstructed_dna,
                    if_dict,
                    result_dict,
                    result_dict_type,
                    enable_rarity
                )

                newDNA = reconstructDNA(deconstructed_DNA)
                if newDNA != originalDNA:
                    originalDNA = str(newDNA)
                    didReconstruct = True
                new_dna = reconstruct_dna(deconstructed_dna)
                if new_dna != original_dna:
                    original_dna = str(new_dna)
                    did_reconstruct = True
                    break

    return str(reconstructDNA(deconstructed_DNA))
    return str(reconstruct_dna(deconstructed_dna))
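To make the rewrite loop above concrete, a hedged walk-through with hypothetical names: suppose the hierarchy has two Attributes, Body and Hat, the incoming DNA is "1-2", and a rule states IF Body's Variant 1 is selected THEN Hat must come from {Cap_1_50}. get_rule_break_type flags "1-2" as a violation (the IF side matches but the THEN side does not), apply_rules_to_dna re-picks the Hat slot from the allowed Variants, and reconstruct_dna rejoins the list, so the DNA leaves the pass as "1-1". The while loop then re-checks every rule against the rewritten DNA until a full pass makes no further changes.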
@ -0,0 +1,146 @@
# Purpose:
# The purpose of this file is to apply the materials a user sets in a given .json file to the Variant collection
# objects also specified in that .json file. The materialized DNA is then returned in the following format: 1-1-1:1-1-1
# Where the numbers right of the ":" are the material numbers applied to the respective Variants left of the ":"

import json
import random
import logging
import traceback
from .helpers import TextColors

log = logging.getLogger(__name__)

def select_material(material_list, variant, enable_rarity):
    """Selects a material from a passed material list."""
    material_list_of_i = []  # List of Material names instead of order numbers
    rarity_list_of_i = []
    if_zero_bool = None

    for material in material_list:
        # Material Order Number comes from index in the Material List in materials.json for a given Variant.
        # material_order_num = list(material_list.keys()).index(material)

        material_list_of_i.append(material)

        material_rarity_percent = material_list[material]
        rarity_list_of_i.append(float(material_rarity_percent))

    # print(f"MATERIAL_LIST_OF_I:{material_list_of_i}")
    # print(f"RARITY_LIST_OF_I:{rarity_list_of_i}")

    # Rarity weights are expected to be either all zero or all non-zero; zero weights trigger a uniform choice below.
    for b in rarity_list_of_i:
        if b == 0:
            if_zero_bool = True
        elif b != 0:
            if_zero_bool = False

    if enable_rarity:
        try:
            if if_zero_bool:
                selected_material = random.choices(material_list_of_i, k=1)
            elif not if_zero_bool:
                selected_material = random.choices(material_list_of_i, weights=rarity_list_of_i, k=1)
        except IndexError:
            log.error(
                f"\n{traceback.format_exc()}"
                f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
                f"An issue was found within the Material List of the Variant collection '{variant}'. For more "
                f"information on Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}"
                f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
            )
            raise IndexError()
    else:
        try:
            selected_material = random.choices(material_list_of_i, k=1)
        except IndexError:
            log.error(
                f"\n{traceback.format_exc()}"
                f"\n{TextColors.ERROR}Blend_My_NFTs Error:\n"
                f"An issue was found within the Material List of the Variant collection '{variant}'. For more "
                f"information on Blend_My_NFTs compatible scenes, see:\n{TextColors.RESET}"
                f"https://github.com/torrinworx/Blend_My_NFTs#blender-file-organization-and-structure\n"
            )
            raise IndexError()

    return selected_material[0], material_list

def get_variant_att_index(variant, hierarchy):
    variant_attribute = None

    for attribute in hierarchy:
        for variant_h in hierarchy[attribute]:
            if variant_h == variant:
                variant_attribute = attribute

    attribute_index = list(hierarchy.keys()).index(variant_attribute)
    variant_order_num = variant.split("_")[1]
    return attribute_index, variant_order_num

def match_dna_to_variant(hierarchy, single_dna):
    """
    Matches each DNA number separated by "-" to its attribute, then its variant.
    """

    list_attributes = list(hierarchy.keys())
    list_dna_deconstructed = single_dna.split('-')
    dna_dictionary = {}

    for i, j in zip(list_attributes, list_dna_deconstructed):
        dna_dictionary[i] = j

    for x in dna_dictionary:
        for k in hierarchy[x]:
            k_num = hierarchy[x][k]["number"]
            if k_num == dna_dictionary[x]:
                dna_dictionary.update({x: k})
    return dna_dictionary

def apply_materials(hierarchy, single_dna, materials_file, enable_rarity):
    """
    DNA with applied material example: "1-1:1-1" <Normal DNA>:<Selected Material for each Variant>

    The Material DNA will select the material for the Variant order number in the NFT DNA based on the Variant
    Material list in the Variant_Material.json file.
    """

    single_dna_dict = match_dna_to_variant(hierarchy, single_dna)
    materials_file = json.load(open(materials_file))
    deconstructed_material_dna = {}

    for a in single_dna_dict:
        complete = False
        for b in materials_file:
            if single_dna_dict[a] == b:
                material_name, material_list = select_material(materials_file[b]['Material List'], b, enable_rarity)

                # Gets the Order Number of the Material
                material_order_num = list(material_list.keys()).index(material_name)

                deconstructed_material_dna[a] = str(material_order_num + 1)
                complete = True
        if not complete:
            deconstructed_material_dna[a] = "0"

    # This section is now incorrect and needs updating:

    # Make Attributes have the same materials:
    # Order your Attributes alphabetically, then assign each Attribute a number, starting with 0. So Attribute 'A' = 0,
    # Attribute 'B' = 1, 'C' = 2, 'D' = 3, etc. For each pair you want to equal another, add its number to this list:
    # synced_material_attributes = [1, 2]
    #
    # first_mat = deconstructed_material_dna[synced_material_attributes[0]]
    # for i in synced_material_attributes:
    #     deconstructed_material_dna[i] = first_mat

    material_dna = ""
    for a in deconstructed_material_dna:
        num = "-" + str(deconstructed_material_dna[a])
        material_dna += num
    material_dna = ''.join(material_dna.split('-', 1))

    return f"{single_dna}:{material_dna}"
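For reference, the materials .json loaded above maps each Variant collection name to a "Material List" of material names and rarity weights, which is what select_material consumes. A minimal sketch with hypothetical variant and material names:

{
 "Red_1_25": {
  "Material List": {
   "Glossy": 50,
   "Matte": 50
  }
 }
}
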
@ -0,0 +1,191 @@
# Some code in this file was generously sponsored by the amazing team over at SolSweepers!
# Feel free to check out their amazing project and see how they are using Blend_My_NFTs:
# https://discord.gg/QTT7dzcuVs

# Purpose:
# This file returns the specified metadata format to exporter.py for a given NFT DNA.

import os
import json


def send_metadata_to_json(meta_data_dict, save_path, file_name):
    json_metadata = json.dumps(meta_data_dict, indent=1, ensure_ascii=True)
    with open(os.path.join(save_path, f"{file_name}.json"), 'w') as outfile:
        outfile.write(json_metadata + '\n')


def strip_nums(variant):
    variant = str(variant).split('_')[0]
    return variant


# Cardano Template
def create_cardano_metadata(
        name,
        order_num,
        nft_dna,
        nft_variants,
        material_attributes,
        custom_fields,
        enable_custom_fields,
        cardano_description,
        cardano_metadata_path
):
    meta_data_dict_cardano = {"721": {
        "<policy_id>": {
            name: {
                "name": name,
                "image": "<ipfs_link>",
                "mediaType": "<mime_type>",
                "description": cardano_description,
            }
        },
        "version": "1.0"
    }}

    # Variants and Attributes:
    for i in nft_variants:
        meta_data_dict_cardano["721"]["<policy_id>"][name][i] = strip_nums(nft_variants[i])

    # Material Variants and Attributes:
    for i in material_attributes:
        meta_data_dict_cardano["721"]["<policy_id>"][name][i] = material_attributes[i]

    # Custom Fields:
    if enable_custom_fields:
        for i in custom_fields:
            meta_data_dict_cardano["721"]["<policy_id>"][name][i] = custom_fields[i]

    send_metadata_to_json(
        meta_data_dict_cardano,
        cardano_metadata_path,
        name
    )

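For a sense of the output, an NFT named (hypothetically) MyNFT_1 with a single "Body" variant would be written by send_metadata_to_json roughly as follows; "<policy_id>", "<ipfs_link>", and "<mime_type>" are the literal placeholders from the template above, and all other values are illustrative:

{
 "721": {
  "<policy_id>": {
   "MyNFT_1": {
    "name": "MyNFT_1",
    "image": "<ipfs_link>",
    "mediaType": "<mime_type>",
    "description": "",
    "Body": "Red"
   }
  },
  "version": "1.0"
 }
}
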
# Solana Template
def createSolanaMetaData(
        name,
        order_num,
        nft_dna,
        nft_variants,
        material_attributes,
        custom_fields,
        enable_custom_fields,
        solana_description,
        solana_metadata_path
):
    metadata_dict_solana = {
        "name": name,
        "symbol": "",
        "description": solana_description,
        "seller_fee_basis_points": None,
        "image": "",
        "animation_url": "",
        "external_url": ""
    }

    attributes = []

    # Variant and Attributes:
    for i in nft_variants:
        dictionary = {
            "trait_type": i,
            "value": strip_nums(nft_variants[i])
        }
        attributes.append(dictionary)

    # Material Variants and Attributes:
    for i in material_attributes:
        dictionary = {
            "trait_type": i,
            "value": material_attributes[i]
        }
        attributes.append(dictionary)

    # Custom Fields:
    if enable_custom_fields:
        for i in custom_fields:
            dictionary = {
                "trait_type": i,
                "value": custom_fields[i]
            }
            attributes.append(dictionary)

    metadata_dict_solana["attributes"] = attributes
    metadata_dict_solana["collection"] = {
        "name": "",
        "family": ""
    }

    metadata_dict_solana["properties"] = {
        "files": [{"uri": "", "type": ""}],
        "category": "",
        "creators": [{"address": "", "share": None}]
    }

    send_metadata_to_json(
        metadata_dict_solana,
        solana_metadata_path,
        name
    )

# ERC721 Template
def create_erc721_meta_data(
        name,
        order_num,
        nft_dna,
        nft_variants,
        material_attributes,
        custom_fields,
        enable_custom_fields,
        erc721_description,
        erc721_metadata_path
):
    metadata_dict_erc721 = {
        "name": name,
        "description": erc721_description,
        "image": "",
        "attributes": None,
    }

    attributes = []

    # Variants and Attributes:
    for i in nft_variants:
        dictionary = {
            "trait_type": i,
            "value": strip_nums(nft_variants[i])
        }

        attributes.append(dictionary)

    # Material Variants and Attributes:
    for i in material_attributes:
        dictionary = {
            "trait_type": i,
            "value": material_attributes[i]
        }

        attributes.append(dictionary)

    # Custom Fields:
    if enable_custom_fields:
        for i in custom_fields:
            dictionary = {
                "trait_type": i,
                "value": custom_fields[i]
            }
            attributes.append(dictionary)

    metadata_dict_erc721["attributes"] = attributes

    send_metadata_to_json(
        metadata_dict_erc721,
        erc721_metadata_path,
        name
    )
@ -0,0 +1,49 @@
# Purpose:
# This file goes through all batches, renames, and sorts all NFT files into a Complete_Collection folder in
# Blend_My_NFTs

import os
import json
import shutil
import logging

from .helpers import remove_file_by_extension

log = logging.getLogger(__name__)


def reformat_nft_collection(refactor_panel_input):
    complete_coll_path = os.path.join(refactor_panel_input.save_path, "Blend_My_NFTs Output", "Complete_Collection")

    if not os.path.exists(complete_coll_path):
        os.mkdir(complete_coll_path)

    batch_list_dirty = os.listdir(refactor_panel_input.nft_batch_save_path)
    batch_list = remove_file_by_extension(batch_list_dirty)
    collection_info = {"Total Time": 0}

    for folder in batch_list:
        batch_info = json.load(open(os.path.join(refactor_panel_input.nft_batch_save_path, folder, "batch_info.json")))
        collection_info[os.path.basename(folder)] = batch_info
        collection_info["Total Time"] = collection_info["Total Time"] + batch_info["Batch Render Time"]

        file_list_dirty = os.listdir(os.path.join(refactor_panel_input.nft_batch_save_path, folder))
        file_list = remove_file_by_extension(file_list_dirty)

        for mediaTypeFolder in file_list:
            if mediaTypeFolder != "batch_info.json":
                media_type_folder_dir = os.path.join(refactor_panel_input.nft_batch_save_path, folder, mediaTypeFolder)

                for i in os.listdir(media_type_folder_dir):
                    destination = os.path.join(complete_coll_path, mediaTypeFolder)
                    if not os.path.exists(destination):
                        os.makedirs(destination)

                    shutil.move(os.path.join(media_type_folder_dir, i), destination)

    collection_info = json.dumps(collection_info, indent=1, ensure_ascii=True)
    with open(os.path.join(complete_coll_path, "collection_info.json"), 'w') as outfile:
        outfile.write(collection_info + '\n')

    log.info(f"All NFT files stored and sorted to the Complete_Collection folder in {refactor_panel_input.save_path}")

    shutil.rmtree(refactor_panel_input.nft_batch_save_path)
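For reference, the resulting collection_info.json holds the summed render time plus each batch's batch_info dict keyed by batch folder name. A minimal sketch (batch names and timings illustrative; batch_info fields other than "Batch Render Time" omitted):

{
 "Total Time": 120.5,
 "Batch1": {
  "Batch Render Time": 60.25
 },
 "Batch2": {
  "Batch Render Time": 60.25
 }
}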