add PY scripts to tile and render parts of a blender frame

This commit is contained in:
Andrew Scobie 2018-06-21 11:33:46 +12:00
Родитель 4f3df49708
Коммит 06f145c793
7 изменённых файлов: 658 добавлений и 6 удалений

Просмотреть файл

@ -0,0 +1,185 @@
{
"templateMetadata":{
"description":"Distributed render Blender frames over multiple nodes."
},
"parameters": {
"poolId": {
"type": "string",
"defaultValue": "blender-win-tile-render",
"metadata": {
"description": "The ID of the pool on which to run the job."
}
},
"jobName": {
"type": "string",
"defaultValue": "blender-tile",
"metadata": {
"description": "The name of the Azure Batch job, also used to prefix rendered outputs"
}
},
"inputData": {
"type": "string",
"defaultValue": "fgrp-tile-test",
"metadata": {
"description": "The file group where the input data is stored",
"advancedType": "file-group"
}
},
"inputDataSas": {
"type": "string",
"metadata": {
"description": "SAS token for reading the data from the selected file group",
"advancedType": "file-group-sas",
"dependsOn": "inputData"
}
},
"blendFile": {
"type": "string",
"defaultValue": "bmw27_cpu.blend",
"metadata": {
"description": "The Blender scene file to be rendered",
"advancedType": "file-in-file-group",
"dependsOn": "inputData",
"wildcards": "*.blend"
}
},
"optionalParameters": {
"type": "string",
"defaultValue": " ",
"metadata": {
"description": "Optional parameters to pass to blender, e.g. --scene <name>"
}
},
"frameStart": {
"type": "int",
"defaultValue": 1,
"metadata": {
"description": "The first frame to render"
}
},
"frameEnd": {
"type": "int",
"defaultValue": 1,
"metadata": {
"description": "The last frame to render"
}
},
"outputs": {
"type": "string",
"defaultValue": "fgrp-outputs",
"metadata": {
"description": "The file group where outputs will be stored",
"advancedType": "file-group"
}
},
"outputSas": {
"type": "string",
"metadata": {
"description": "Writable SAS token for uploading outputs to the container",
"advancedType": "file-group-write-sas",
"dependsOn": "outputs"
}
}
},
"job": {
"type": "Microsoft.Batch/batchAccounts/jobs",
"properties": {
"id": "[parameters('jobName')]",
"poolInfo": {
"poolId": "[parameters('poolId')]"
},
"displayName": "",
"usesTaskDependencies": true,
"onAllTasksComplete": "terminateJob",
"jobPreparationTask":{
"userIdentity": {
"autoUser": {
"scope": "pool",
"elevationLevel": "admin"
}
},
"commandLine": "/bin/bash -c 'echo blender'",
"resourceFiles": [
{
"source": {
"fileGroup": "[parameters('inputData')]",
"prefix": ""
},
"filePath": "."
}
]
},
"jobReleaseTask":{
"userIdentity":{
"autoUser": {
"scope": "pool",
"elevationLevel": "admin"
}
},
"commandLine":"/bin/bash -c 'DEL /S /Q \"%AZ_BATCH_JOB_PREP_WORKING_DIR%\" & exit /b 0'"
},
"jobManagerTask":{
"id": "_manager",
"displayName": "job manager task",
"allowLowPriorityNode" : true,
"runExclusive": true,
"environmentSettings": [
{ "name": "OUTPUT_FORMAT", "value": "PNG" },
{ "name": "BLEND_FILE", "value": "[parameters('blendFile')]" },
{ "name": "OPTIONAL_PARAMS", "value": "[parameters('optionalParameters')]" },
{ "name": "INPUT_CONTAINER_SAS", "value": "[parameters('inputDataSas')]" },
{ "name": "OUTPUT_CONTAINER_SAS", "value": "[parameters('outputSas')]" },
{ "name": "X_TILES", "value": "6" },
{ "name": "Y_TILES", "value": "6" },
{ "name": "FRAME_START", "value": "[parameters('frameStart')]" },
{ "name": "FRAME_END", "value": "[parameters('frameEnd')]" }
],
"authenticationTokenSettings":{
"access": ["job"]
},
"userIdentity": {
"autoUser": {
"scope": "pool",
"elevationLevel": "admin"
}
},
"commandLine": "/bin/bash -c 'env;python $AZ_BATCH_TASK_WORKING_DIR/scripts/python-frame-splitter.py'",
"killJobOnCompletion": false,
"resourceFiles": [
{
"source": {
"fileGroup": "[parameters('inputData')]"
},
"filePath": "scripts/"
}
],
"outputFiles": [
{
"filePattern": "../stdout.txt",
"destination": {
"autoStorage": {
"fileGroup": "[parameters('outputs')]",
"path": "[parameters('jobName')]/logs/manager/stdout.txt"
}
},
"uploadOptions": {
"uploadCondition": "taskCompletion"
}
},
{
"filePattern": "../stderr.txt",
"destination": {
"autoStorage": {
"fileGroup": "[parameters('outputs')]",
"path": "[parameters('jobName')]/logs/manager/stderr.txt"
}
},
"uploadOptions": {
"uploadCondition": "taskCompletion"
}
}
]
}
}
}
}

Просмотреть файл

@ -0,0 +1,122 @@
{
"parameters": {
"poolId": {
"type": "string",
"defaultValue": "blender-blah",
"metadata": {
"description": "Id of the pool"
}
},
"dedicatedVmCount": {
"type": "int",
"defaultValue": 1,
"metadata": {
"description": "The number of dedicated nodes in the Azure Batch pool"
}
},
"lowPriorityVmCount": {
"type": "int",
"defaultValue": 0,
"metadata": {
"description": "The number of low priority nodes in the Azure Batch pool"
}
},
"nodeSize": {
"type": "string",
"metadata": {
"description": "The size of the nodes that run the tasks in the pool"
},
"defaultValue": "Standard_D3_v2",
"allowedValues": [
"Standard_A8",
"Standard_A9",
"Standard_A10",
"Standard_A11",
"Standard_D1_v2",
"Standard_D2_v2",
"Standard_D3_v2",
"Standard_D4_v2",
"Standard_D5_v2",
"Standard_D11_v2",
"Standard_D12_v2",
"Standard_D13_v2",
"Standard_D14_v2",
"Standard_D15_v2",
"Standard_D2_v3",
"Standard_D4_v3",
"Standard_D8_v3",
"Standard_D16_v3",
"Standard_D32_v3",
"Standard_D64_v3",
"Standard_E2_v3",
"Standard_E4_v3",
"Standard_E8_v3",
"Standard_E16_v3",
"Standard_E32_v3",
"Standard_E64_v3",
"Standard_F1",
"Standard_F2",
"Standard_F4",
"Standard_F8",
"Standard_F16",
"Standard_G1",
"Standard_G2",
"Standard_G3",
"Standard_G4",
"Standard_G5",
"Standard_H8",
"Standard_H16",
"Standard_H8m",
"Standard_H16m",
"Standard_H16r",
"Standard_H16mr",
"Standard_L4",
"Standard_L8",
"Standard_L16",
"Standard_L32",
"Standard_M64s",
"Standard_M64ms",
"Standard_M128s",
"Standard_NC6",
"Standard_NC12",
"Standard_NC24",
"Standard_NC6s_v2",
"Standard_NC12s_v2",
"Standard_NC24s_v2",
"Standard_NC6s_v3",
"Standard_NC12s_v3",
"Standard_NC24s_v3"
]
}
},
"variables": {
"osType": {
"imageReference": {
"publisher": "Canonical",
"offer": "UbuntuServer",
"sku": "16.04.0-LTS",
"version": "latest"
},
"nodeAgentSKUId": "batch.node.ubuntu 16.04"
}
},
"pool": {
"id": "[parameters('poolId')]",
"vmSize": "[parameters('nodeSize')]",
"virtualMachineConfiguration": "[variables('osType')]",
"targetDedicatedNodes": "[parameters('dedicatedVmCount')]",
"targetLowPriorityNodes": "[parameters('lowPriorityVmCount')]",
"enableAutoScale": false,
"startTask": {
"commandLine": "/bin/bash -c 'apt-get update;apt-get install -y imagemagick;apt-get install -y blender;apt-get install -y python-pip;pip install azure-batch;'",
"waitForSuccess": true,
"maxTaskRetryCount": 0,
"userIdentity": {
"autoUser": {
"scope": "pool",
"elevationLevel": "admin"
}
}
}
}
}

Просмотреть файл

@ -69,7 +69,7 @@
"displayName": "[parameters('blendFile')]",
"onAllTasksComplete": "terminateJob",
"jobPreparationTask":{
"userIdentity":{
"userIdentity": {
"autoUser": {
"scope": "pool"
}

Просмотреть файл

@ -80,15 +80,12 @@
"Standard_NC6",
"Standard_NC12",
"Standard_NC24",
"Standard_NC24r",
"Standard_NC6s_v2",
"Standard_NC12s_v2",
"Standard_NC24s_v2",
"Standard_NC24rs_v2",
"Standard_NC6s_v3",
"Standard_NC12s_v3",
"Standard_NC24s_v3",
"Standard_NC24rs_v3"
"Standard_NC24s_v3"
]
}
},

Просмотреть файл

@ -53,7 +53,7 @@
},
"pool": {
"id": "[parameters('poolId')]",
"displayName": "3ds Max pool running: [parameters('poolId')]",
"displayName": "Blender GPU pool",
"vmSize": "[parameters('nodeSize')]",
"virtualMachineConfiguration": "[variables('osType')]",
"targetDedicatedNodes": "[parameters('dedicatedVmCount')]",

Просмотреть файл

@ -0,0 +1,273 @@
import os
from azure.batch import BatchServiceClient
from azure.batch import models
from azure.batch.batch_auth import SharedKeyCredentials
from azure.common.credentials import BasicTokenAuthentication, OAuthTokenAuthentication
class Tile:
    """Geometry descriptor for one tile of a frame's render grid.

    Attributes:
        tile: zero-based ordinal of the tile within the grid.
        current_x: column index of the tile.
        current_y: row index of the tile.
    """

    def __init__(self, tile, current_x, current_y):
        # Plain value object; attributes mirror the constructor arguments.
        self.tile, self.current_x, self.current_y = tile, current_x, current_y
def main():
    """Job-manager entry point: fan out tile-render tasks for every frame.

    Reads the tiling geometry and frame range from the task's environment
    settings, builds an authenticated Batch client from the job-scoped
    authentication token, then creates one task collection per frame.
    """
    print("------------------------------------")
    print("job manager task reporting for duty")

    # Tiling geometry and frame range arrive via the job template's
    # environment settings.
    x_tiles = int(os.environ["X_TILES"])
    y_tiles = int(os.environ["Y_TILES"])
    frame_start = int(os.environ["FRAME_START"])
    frame_end = int(os.environ["FRAME_END"])
    batch_account_url = os.environ["AZ_BATCH_ACCOUNT_URL"]

    # The AZ_BATCH_AUTHENTICATION_TOKEN grants this task access to its own
    # job, which is what lets it add more tasks below.
    token = {"access_token": os.environ["AZ_BATCH_AUTHENTICATION_TOKEN"]}
    credentials = OAuthTokenAuthentication(client_id=None, token=token)
    batch_client = BatchServiceClient(credentials, base_url=batch_account_url)

    # The same tile layout is reused for every frame.
    tiles = create_tiles(x_tiles, y_tiles)

    next_task_id = 1
    for frame in range(frame_start, frame_end + 1):
        print("generating tasks for frame: {}, with current_task_id: {}".format(frame, next_task_id))
        next_task_id = create_tasks_for_frame(frame, next_task_id, tiles, batch_client)
        print("finished creating tasks for frame: {}, with current_task_id: {}\n".format(frame, next_task_id))
def create_tiles(x_tiles, y_tiles):
    """Build the list of Tile descriptors covering an x_tiles * y_tiles grid.

    BUGFIX: the original looped ``for j in range(x_tiles)`` while using ``j``
    as the y coordinate (and vice versa), which produced out-of-range tile
    coordinates whenever x_tiles != y_tiles. The loops now match the axis
    each index feeds; for square grids the output is unchanged.

    :param x_tiles: number of tile columns
    :param y_tiles: number of tile rows
    :return: list of Tile, numbered 0..x_tiles*y_tiles-1 in row-major order
    """
    tiles = []
    total = x_tiles * y_tiles
    counter = 0
    print("create_tiles from '{}' -> '{}'".format(0, total))
    for y in range(y_tiles):
        for x in range(x_tiles):
            tiles.append(Tile(counter, x, y))
            counter += 1
    return tiles
def create_tasks_for_frame(frame, current_task_id, tiles, batch_client):
    """Create and submit one tile task per tile plus a merge task for a frame.

    :param frame: frame number being rendered
    :param current_task_id: next free numeric task id
    :param tiles: list of Tile descriptors for the grid
    :param batch_client: authenticated BatchServiceClient
    :return: the next free task id after this frame's tasks
    """
    job_id = os.environ["AZ_BATCH_JOB_ID"]
    first_tile_task = current_task_id
    tasks = []

    # One render task per tile; ids are assigned sequentially.
    for tile in tiles:
        print("task: {} - tile: {}, current_x: {}, current_y: {}".format(current_task_id, tile.tile, tile.current_x, tile.current_y))
        tasks.append(create_task(frame, current_task_id, job_id, tile.tile + 1, tile.current_x, tile.current_y))
        current_task_id += 1

    # The merge task depends on every tile task created above.
    last_tile_task = current_task_id - 1
    print("merge task: {}, depend_start: {} - depend_end: {}".format(current_task_id, first_tile_task, last_tile_task))
    tasks.append(create_merge_task(frame, current_task_id, job_id, first_tile_task, last_tile_task))
    current_task_id += 1

    # TODO: consider yielding task collections instead of submitting inline.
    submit_task_collection(batch_client, job_id, tasks, frame)
    return current_task_id
def create_task(frame, task_id, job_id, tile_num, current_x, current_y):
    """Build the Batch task that renders a single tile of a single frame.

    Runs Blender in background mode against the scene file downloaded by the
    job preparation task, driving the render with python-task-manager.py.
    Tile geometry is passed to that script through environment settings.

    :param frame: frame number being rendered
    :param task_id: numeric id assigned to this task
    :param job_id: id of the enclosing Batch job (used in output paths)
    :param tile_num: one-based tile number within the frame
    :param current_x: tile column index
    :param current_y: tile row index
    :return: models.TaskAddParameter ready for add_collection
    """
    blend_file = os.environ["BLEND_FILE"]
    output_sas = os.environ["OUTPUT_CONTAINER_SAS"]
    optional_params = os.environ["OPTIONAL_PARAMS"]
    output_format = os.environ["OUTPUT_FORMAT"]
    command_line = "blender -b \"$AZ_BATCH_JOB_PREP_WORKING_DIR/{}\" -P \"$AZ_BATCH_JOB_PREP_WORKING_DIR/python-task-manager.py\" -y -t 0 -F {} -E CYCLES {}".format(blend_file, output_format, optional_params)

    def log_file(pattern, log_name):
        # OutputFile that uploads one task log into the job's log folder,
        # regardless of whether the task succeeded.
        return models.OutputFile(
            file_pattern=pattern,
            destination=models.OutputFileDestination(
                container=models.OutputFileBlobContainerDestination(
                    container_url=output_sas,
                    path="{}/logs/frame-{}/tile-{}.{}".format(job_id, frame, pad_number(tile_num, 3), log_name)
                )
            ),
            upload_options=models.OutputFileUploadOptions(models.OutputFileUploadCondition.task_completion)
        )

    # Rendered tiles are only uploaded when the render succeeded.
    tile_upload = models.OutputFile(
        file_pattern="tile_*",
        destination=models.OutputFileDestination(
            container=models.OutputFileBlobContainerDestination(
                container_url=output_sas,
                path="{}/outputs/frame-{}".format(job_id, frame)
            )
        ),
        upload_options=models.OutputFileUploadOptions(models.OutputFileUploadCondition.task_success)
    )

    return models.TaskAddParameter(
        id=pad_number(task_id),
        display_name="frame: {}, tile: {}".format(frame, tile_num),
        command_line="/bin/bash -c '{}'".format(command_line),
        environment_settings=[
            models.EnvironmentSetting("X_TILES", os.environ["X_TILES"]),
            models.EnvironmentSetting("Y_TILES", os.environ["Y_TILES"]),
            models.EnvironmentSetting("BLEND_FILE", os.environ["BLEND_FILE"]),
            models.EnvironmentSetting("CURRENT_FRAME", str(frame)),
            models.EnvironmentSetting("CURRENT_TILE", str(tile_num)),
            models.EnvironmentSetting("CURRENT_X", str(current_x)),
            models.EnvironmentSetting("CURRENT_Y", str(current_y))
        ],
        output_files=[
            log_file("../stdout.txt", "stdout.log"),
            log_file("../stderr.txt", "stderr.log"),
            log_file("../fileuploaderr.txt", "file_upload_stderr.log"),
            log_file("../fileuploadout.txt", "file_upload_stdout.log"),
            tile_upload
        ])
def create_merge_task(frame, task_id, job_id, depend_start, depend_end):
    """
    Azure Batch task that executes the ImageMagick `convert` command
    line to combine all of the output tiles into a single output image.

    This task uses the task dependency model to make sure it
    doesn't execute before its dependent tasks have completed. This way
    we know all of the output image tiles will exist.

    :param frame: Frame number of the scene that this merge task is
     processing.
    :type frame: int
    :param task_id: Identifier of the task.
    :type task_id: str
    :param job_id: Unique identifier of the job. Job identifiers are unique
     within a single Azure Batch account.
    :type job_id: str
    :param depend_start: First task id of the dependency sequence. If each
     frame is split into 16 tiles, then every 17th task will be a merge task
     and that merge task will be dependent on the preceding 16 tasks.
     tile tasks 1 - 16, then merge, then tiles 18 - 34, then merge, etc.
    :type depend_start: int
    :param depend_end: Final task id of the dependency sequence. Explanation
     for param `depend_start` applies here as well.
    :type depend_end: int
    """
    x_tiles = int(os.environ["X_TILES"])
    # BUGFIX: previously read X_TILES here too, which broke the resource-file
    # count for any non-square tile grid.
    y_tiles = int(os.environ["Y_TILES"])
    output_sas = os.environ["OUTPUT_CONTAINER_SAS"]
    working_dir = os.environ["AZ_BATCH_TASK_WORKING_DIR"]
    output_format = os.environ["OUTPUT_FORMAT"]
    print("working_dir: {}".format(working_dir))
    # ImageMagick composites every downloaded tile into the final frame image.
    command_line = "convert $AZ_BATCH_TASK_WORKING_DIR/tile_* -flatten frame_{}.{}".format(frame, get_file_extension(output_format))
    print("executing: {}".format(command_line))
    return models.TaskAddParameter(
        id=pad_number(task_id),
        display_name="frame: {} - merge task".format(frame),
        command_line="/bin/bash -c '{}'".format(command_line),
        environment_settings=[
            models.EnvironmentSetting("X_TILES", str(x_tiles)),
            models.EnvironmentSetting("Y_TILES", str(y_tiles))
        ],
        # Only runs once every tile task in [depend_start, depend_end] is done.
        depends_on=models.TaskDependencies(task_id_ranges=[
            models.TaskIdRange(depend_start, depend_end)
        ]),
        # Pull the rendered tiles back down from the output container.
        resource_files=get_resource_files(x_tiles, y_tiles, frame),
        output_files=[
            models.OutputFile(
                file_pattern="../stdout.txt",
                destination=models.OutputFileDestination(
                    container=models.OutputFileBlobContainerDestination(
                        container_url=output_sas,
                        path="{}/logs/frame-{}/merge.stdout.log".format(job_id, frame)
                    )
                ),
                upload_options=models.OutputFileUploadOptions(models.OutputFileUploadCondition.task_completion)
            ),
            models.OutputFile(
                file_pattern="../stderr.txt",
                destination=models.OutputFileDestination(
                    container=models.OutputFileBlobContainerDestination(
                        container_url=output_sas,
                        path="{}/logs/frame-{}/merge.stderr.log".format(job_id, frame)
                    )
                ),
                upload_options=models.OutputFileUploadOptions(models.OutputFileUploadCondition.task_completion)
            ),
            models.OutputFile(
                file_pattern="frame_*",
                destination=models.OutputFileDestination(
                    container=models.OutputFileBlobContainerDestination(
                        container_url=output_sas,
                        path="{}/outputs/final".format(job_id)
                    )
                ),
                upload_options=models.OutputFileUploadOptions(models.OutputFileUploadCondition.task_success)
            )
        ])
def pad_number(number, width=6):
    """Return *number* as a zero-padded decimal string of at least *width* digits.

    The second parameter was renamed from ``len`` to ``width`` so it no longer
    shadows the builtin; every call site in this script passes it positionally.

    :param number: value to format
    :param width: minimum number of digits (default 6)
    """
    return str(number).zfill(width)
def get_file_extension(blender_output_format):
    """Map a Blender output-format name (e.g. "PNG") to a file extension.

    Currently this simply lower-cases the format name, which is only correct
    for formats such as PNG whose name equals their extension.
    """
    extension = blender_output_format.lower()
    return extension
def get_resource_files(x_tiles, y_tiles, frame):
    """Build ResourceFiles that download every rendered tile of *frame*.

    Each tile blob URL is assembled from the writable container SAS by
    inserting the blob path between the container URL and the SAS query.

    :param x_tiles: number of tile columns
    :param y_tiles: number of tile rows
    :param frame: frame number whose tiles are fetched
    :return: list of models.ResourceFile
    """
    tile_count = x_tiles * y_tiles
    # NOTE(review): read but unused; kept so a missing OUTPUT_FORMAT still
    # fails loudly here, as it did originally.
    output_format = os.environ["OUTPUT_FORMAT"]
    output_sas = os.environ["OUTPUT_CONTAINER_SAS"]
    job_id = os.environ["AZ_BATCH_JOB_ID"]
    # Split "<container url>?<sas token>" so the blob path can be spliced in.
    parts = output_sas.split("?")
    files = []
    for tile_index in range(1, tile_count + 1):
        # TODO: derive the extension from OUTPUT_FORMAT instead of assuming PNG
        filename = "tile_{}.png".format(str(tile_index).zfill(3))
        blob_path = "{}/outputs/frame-{}/{}".format(job_id, frame, filename)
        blob_url = "{}/{}?{}".format(parts[0], blob_path, parts[1])
        files.append(models.ResourceFile(blob_url, filename))
    return files
def submit_task_collection(batch_client, job_id, tasks, frame):
    """Add the frame's task collection to the job, logging any Batch errors.

    :param batch_client: authenticated BatchServiceClient
    :param job_id: id of the job the tasks are added to
    :param tasks: list of models.TaskAddParameter to submit
    :param frame: frame number (used for logging only)
    :raises models.BatchErrorException: re-raised after logging the details
    """
    print("submitting: {} tasks to job: {}, for frame: {}".format(str(len(tasks)), job_id, frame))
    try:
        batch_client.task.add_collection(job_id=job_id, value=tasks)
    except models.BatchErrorException as ex:
        print("got an error adding tasks: {}".format(str(ex)))
        for error_detail in ex.inner_exception.values:
            print("detail: {}".format(str(error_detail)))
        # Bare raise preserves the original traceback; ``raise ex`` rebuilt it
        # from this frame.
        raise
# Script entry point: only run when executed directly (not on import).
if __name__ == '__main__':
    main()

Просмотреть файл

@ -0,0 +1,75 @@
import bpy
import os
import sys
def main():
    """Entry point run inside Blender (via ``-P``) to render one tile.

    Reads the tile id and Batch context from the task environment, logs them,
    and delegates the actual border render to render_tile().
    """
    # BUGFIX: defined before the try block so the except handler cannot hit a
    # NameError when a failure happens before CURRENT_TILE is read.
    current_tile = None
    try:
        print("-------------------------------")
        print("task manager reporting for duty")
        print("-------------------------------")
        job_id = os.environ["AZ_BATCH_JOB_ID"]
        current_tile = int(os.environ["CURRENT_TILE"])
        batch_account_url = os.environ["AZ_BATCH_ACCOUNT_URL"]
        task_id = os.environ["AZ_BATCH_TASK_ID"]
        print("blender with file: {}".format(bpy.data.filepath))
        print("job_id: {}, task_id: {}, batch_account_url: {}".format(job_id, task_id, batch_account_url))
        sys.stdout.flush()
        render_tile(current_tile)
    except Exception as ex:
        # current_tile may still be None if reading the environment failed.
        print("Failed to render tile: {}, with error: {}".format(current_tile, ex))
        # Bare raise preserves the original traceback.
        raise
def render_tile(current_tile):
    """Configure a Blender border render for one tile of the grid and save it.

    :param current_tile: zero-based index of the tile being rendered
    """
    # Tile-grid geometry for this task, injected by the job manager via
    # environment settings.
    x_tiles = int(os.environ["X_TILES"])
    y_tiles = int(os.environ["Y_TILES"])
    current_x = int(os.environ["CURRENT_X"])
    current_y = int(os.environ["CURRENT_Y"])
    print("x_tiles: {}, y_tiles: {}, current_x: {}, current_y: {}, tile: {}".format(x_tiles, y_tiles, current_x, current_y, current_tile))
    current_frame = int(os.environ["CURRENT_FRAME"])
    print("setting current frame to: {}".format(current_frame))
    bpy.context.scene.frame_current = current_frame
    file_format = bpy.context.scene.render.image_settings.file_format
    # Both OpenEXR variants save with the plain .exr extension.
    if file_format in ("OPEN_EXR", "OPEN_EXR_MULTILAYER"):
        file_format = "exr"
    print("file format: {}".format(file_format))
    total_tiles = x_tiles * y_tiles
    # Border coordinates are fractions of the full frame in [0, 1]; Blender's
    # y axis runs bottom-up, hence the "1 -" flip on y.
    # NOTE(review): relies on Python 3 true division (Blender's bundled
    # interpreter) — under Python 2 these would truncate to 0.
    min_x = current_x / x_tiles
    max_x = (current_x + 1) / x_tiles
    min_y = 1 - (current_y + 1) / y_tiles
    max_y = 1 - current_y / y_tiles
    print("rendering on host: {}".format(os.environ["AZ_BATCH_NODE_ID"]))
    print("rendering tile '{}' of '{}' - min_x: {}, max_x: {} || min_y: {}, max_y: {}".format(current_tile, total_tiles, min_x, max_x, min_y, max_y))
    sys.stdout.flush()
    # use border render and set the coordinates (must be set before rendering)
    bpy.context.scene.render.use_border = True
    bpy.context.scene.render.border_min_x = min_x
    bpy.context.scene.render.border_max_x = max_x
    bpy.context.scene.render.border_min_y = min_y
    bpy.context.scene.render.border_max_y = max_y
    # kick off the render
    bpy.ops.render.render()
    # todo: get extension from scene
    directory = os.environ["AZ_BATCH_TASK_WORKING_DIR"]
    save_path = "{}/tile_{}.{}".format(directory, str(current_tile).zfill(3), file_format.lower())
    print("Saving to: {}".format(save_path))
    sys.stdout.flush()
    # Persist the composited "Render Result" image to the task working dir so
    # the task's OutputFile (tile_*) can upload it.
    bpy.data.images["Render Result"].save_render(save_path)
# Entry point when Blender executes this script via the -P flag.
if __name__ == '__main__':
    main()