- Fix integration test and load config from .cfg
- Add basic test scene
- Support scene files at the current directory (no subpath)
Parent: aa418a9667
Commit: 601a03469b
@@ -72,10 +72,18 @@ def format_scene_path(scene_file, os_flavor):
     be on the render node.
     """
     scene_path = get_remote_file_path(scene_file)(os_flavor)
-    if os_flavor == OperatingSystem.windows:
-        return "X:\\\\" + scene_path + '\\\\' + os.path.basename(scene_file)
+    if os_flavor == OperatingSystem.windows:
+        path = "X:\\\\"
+        if scene_path:
+            path += scene_path + '\\\\'
+        path += os.path.basename(scene_file)
+        return path
     else:
-        return "/X/" + scene_path + '/' + os.path.basename(scene_file)
+        path = "/X/"
+        if scene_path:
+            path += scene_path + '/'
+        path += os.path.basename(scene_file)
+        return path
 
 
 def get_default_output_path():
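For context, the reworked branches only append the remote sub-path when one exists, which is what lets a scene file sit at the project root ("no subpath" in the commit message) without producing an empty path segment. A standalone sketch of the same logic; build_scene_path, sub_path and the windows flag are illustrative stand-ins, not the module's real API:

    import os

    def build_scene_path(scene_file, sub_path, windows):
        # Mirrors the patched branches: the sub-path segment is optional.
        if windows:
            path = "X:\\\\"
            if sub_path:
                path += sub_path + '\\\\'
        else:
            path = "/X/"
            if sub_path:
                path += sub_path + '/'
        return path + os.path.basename(scene_file)

    print(build_scene_path('test_scene.mb', '', windows=False))               # /X/test_scene.mb
    print(build_scene_path('scenes/test_scene.mb', 'scenes', windows=False))  # /X/scenes/test_scene.mb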
@@ -0,0 +1,4 @@
+global proc renderPrep()
+{
+    dirmap -en true;
+}
Binary file not shown.
@@ -0,0 +1,68 @@
+//Maya 2017 Project Definition
+
+workspace -fr "fluidCache" "cache/nCache/fluid";
+workspace -fr "JT_ATF" "data";
+workspace -fr "images" "images";
+workspace -fr "offlineEdit" "scenes/edits";
+workspace -fr "STEP_ATF Export" "data";
+workspace -fr "furShadowMap" "renderData/fur/furShadowMap";
+workspace -fr "INVENTOR_ATF Export" "data";
+workspace -fr "scripts" "scripts";
+workspace -fr "STL_ATF" "data";
+workspace -fr "DAE_FBX" "data";
+workspace -fr "shaders" "renderData/shaders";
+workspace -fr "NX_ATF" "data";
+workspace -fr "furFiles" "renderData/fur/furFiles";
+workspace -fr "CATIAV5_ATF Export" "data";
+workspace -fr "OBJ" "data";
+workspace -fr "FBX export" "data";
+workspace -fr "furEqualMap" "renderData/fur/furEqualMap";
+workspace -fr "BIF" "data";
+workspace -fr "DAE_FBX export" "data";
+workspace -fr "CATIAV5_ATF" "data";
+workspace -fr "SAT_ATF Export" "data";
+workspace -fr "movie" "movies";
+workspace -fr "ASS Export" "data";
+workspace -fr "move" "data";
+workspace -fr "mayaAscii" "scenes";
+workspace -fr "autoSave" "autosave";
+workspace -fr "NX_ATF Export" "data";
+workspace -fr "sound" "sound";
+workspace -fr "mayaBinary" "scenes";
+workspace -fr "timeEditor" "Time Editor";
+workspace -fr "JT_ATF Export" "data";
+workspace -fr "iprImages" "renderData/iprImages";
+workspace -fr "FBX" "data";
+workspace -fr "renderData" "renderData";
+workspace -fr "CATIAV4_ATF" "data";
+workspace -fr "fileCache" "cache/nCache";
+workspace -fr "eps" "data";
+workspace -fr "3dPaintTextures" "sourceimages/3dPaintTextures";
+workspace -fr "STL_ATF Export" "data";
+workspace -fr "mel" "scripts";
+workspace -fr "translatorData" "data";
+workspace -fr "particles" "cache/particles";
+workspace -fr "scene" "scenes";
+workspace -fr "SAT_ATF" "data";
+workspace -fr "PROE_ATF" "data";
+workspace -fr "WIRE_ATF Export" "data";
+workspace -fr "sourceImages" "sourceimages";
+workspace -fr "clips" "clips";
+workspace -fr "furImages" "renderData/fur/furImages";
+workspace -fr "INVENTOR_ATF" "data";
+workspace -fr "STEP_ATF" "data";
+workspace -fr "depth" "renderData/depth";
+workspace -fr "IGES_ATF Export" "data";
+workspace -fr "sceneAssembly" "sceneAssembly";
+workspace -fr "IGES_ATF" "data";
+workspace -fr "teClipExports" "Time Editor/Clip Exports";
+workspace -fr "ASS" "data";
+workspace -fr "audio" "sound";
+workspace -fr "bifrostCache" "cache/bifrost";
+workspace -fr "Alembic" "data";
+workspace -fr "illustrator" "data";
+workspace -fr "diskCache" "data";
+workspace -fr "WIRE_ATF" "data";
+workspace -fr "templates" "assets";
+workspace -fr "OBJexport" "data";
+workspace -fr "furAttrMap" "renderData/fur/furAttrMap";
@@ -0,0 +1,7 @@
+[AzureBatch]
+batch_url =
+batch_account =
+batch_key =
+storage_account =
+storage_key =
+
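This template holds the account credentials the integration test now reads instead of environment variables; the test below loads it from data/azure_batch_test.ini relative to the script. A minimal sketch of how a filled-in copy could be loaded and sanity-checked before running the test; the validation loop is hypothetical, not part of the commit:

    import os
    try:
        import configparser as ConfigParser   # Python 3 name
    except ImportError:
        import ConfigParser                    # Python 2, as used by the test below

    cfg = ConfigParser.ConfigParser()
    cfg.read(os.path.join(os.path.dirname(__file__), 'data', 'azure_batch_test.ini'))

    # Hypothetical sanity check: fail fast if any credential was left blank.
    for key in ('batch_url', 'batch_account', 'batch_key', 'storage_account', 'storage_key'):
        if not cfg.get('AzureBatch', key):
            raise ValueError('azure_batch_test.ini has no value for {!r}'.format(key))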
@@ -4,28 +4,44 @@
 # --------------------------------------------------------------------------------------------
 
 import os
 import sys
 import json
 import time
 import uuid
+import ConfigParser
 
 try:
     from unittest import mock
 except ImportError:
     import mock
 
 from environment import MAYA_IMAGES
 import azurebatchutils as utils
 from azure.batch_extensions import _file_utils as fileutils
 
 import azure.batch_extensions as batch
 from azure.batch_extensions import models
 from azure.batch.batch_auth import SharedKeyCredentials
 from azure.storage.blob.blockblobservice import BlockBlobService
 
-STORAGE_ACCOUNT = os.environ['AZURE_STORAGE_ACCOUNT']
-STORAGE_KEY = os.environ['AZURE_STORAGE_ACCESS_KEY']
-BATCH_ENDPOINT = os.environ['AZURE_BATCH_ENDPOINT']
-BATCH_ACCOUNT = os.environ['AZURE_BATCH_ACCOUNT']
-BATCH_KEY = os.environ['AZURE_BATCH_ACCESS_KEY']
-SAMPLE_DIR = os.path.join(os.path.dirname(__file__), 'test_scene')
-POOL_ID = "Maya_Pool_766c5218-a5a7-424d-9868-aebb43f98627" # The OS of the pool will determine whether the job is run with the linux or windows templates.
+
+scriptDir = os.path.dirname(__file__)
+
+cfg = ConfigParser.ConfigParser()
+cfg.read(os.path.join(scriptDir, 'data', 'azure_batch_test.ini'))
+
+STORAGE_ACCOUNT = cfg.get('AzureBatch', 'storage_account')
+STORAGE_KEY = cfg.get('AzureBatch', 'storage_key')
+BATCH_ENDPOINT = cfg.get('AzureBatch', 'batch_url')
+BATCH_ACCOUNT = cfg.get('AzureBatch', 'batch_account')
+BATCH_KEY = cfg.get('AzureBatch', 'batch_key')
+
+SAMPLE_DIR = os.path.join(scriptDir, 'basic_test_scene')
 TEMPLATE_DIR = os.path.abspath('azure_batch_maya/templates')
 SCRIPT_DIR = os.path.abspath('azure_batch_maya/scripts/tools')
+POOL_ID = "" # The OS of the pool will determine whether the job is run with the linux or windows templates.
 
 os.environ['AZUREBATCH_MODULES'] = os.path.abspath('azure_batch_maya/modules')#os.path.join(os.path.split(scriptDir)[0], 'azure_batch_maya/Modules')
 
 def os_flavor(pool_image):
     windows_offers = [value['offer'] for value in MAYA_IMAGES.values() if 'windows' in value['node_sku_id']]
@@ -37,15 +53,27 @@ def os_flavor(pool_image):
     else:
         raise ValueError('Selected pool is not using a valid Maya image.')
 
 def generate_sas_token(storage_client, file_group):
     """Generate SAS token for file group container with read and list
     permissions.
     TODO: Move this into BatchExtensions file utils.
     """
     container_name = fileutils.get_container_name(file_group)
     container_url = fileutils.generate_container_sas_token(
         container_name,
         storage_client,
         permission='rl')
     return container_url
 
 
 if __name__ == '__main__':
     # Setup client
     storage_client = BlockBlobService(STORAGE_ACCOUNT, STORAGE_KEY, endpoint_suffix="core.windows.net")
     credentials = SharedKeyCredentials(BATCH_ACCOUNT, BATCH_KEY)
     client = batch.BatchExtensionsClient(credentials, base_url=BATCH_ENDPOINT, storage_client=storage_client)
 
 
     # Setup test render input data
-    scene_file = 'juggernaut.ma'
+    scene_file = 'test_scene.mb'
     maya_data = 'maya-data-{}'.format(uuid.uuid4())
     client.file.upload(SAMPLE_DIR, maya_data, flatten=True)
     client.file.upload(os.path.join(SCRIPT_DIR, 'generate_thumbnails.py'), maya_data, flatten=True)
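For orientation, client.file.upload(SAMPLE_DIR, maya_data, flatten=True) pushes the contents of the local basic_test_scene folder into the maya_data file group, and flatten=True drops the local directory structure so the scene and its support files land at the root of the file group, which is what lets the scene path carry no sub-path. A rough local preview of the flattened names, assuming flatten simply keeps the basename (illustrative only, not the extension library's implementation):

    import os

    SAMPLE_DIR = os.path.join(os.path.dirname(__file__), 'basic_test_scene')

    # Preview the blob names the flattened upload would create.
    for root, _dirs, files in os.walk(SAMPLE_DIR):
        for name in files:
            local = os.path.join(root, name)
            print('{}  ->  {}'.format(local, name))  # flatten=True keeps only the basename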
@@ -60,20 +88,22 @@ if __name__ == '__main__':
     batch_parameters = {'id': job_id}
     batch_parameters['displayName'] = "Maya Integration Test using {}".format(os_flavor)
     batch_parameters['metadata'] = [{"name": "JobType", "value": "Maya"}]
-    template_file = os.path.join(TEMPLATE_DIR, 'arnold-basic-{}.json'.format(os_flavor.lower()))
+    template_file = os.path.join(TEMPLATE_DIR, 'mayaSoftware-basic-{}.json'.format(os_flavor.lower()))
     batch_parameters['applicationTemplateInfo'] = {'filePath': template_file}
     application_params = {}
     batch_parameters['applicationTemplateInfo']['parameters'] = application_params
 
     application_params['outputs'] = job_id
-    application_params['sceneFile'] = utils.format_scene_path(scene_file, os_flavor)
+    application_params['sceneFile'] = utils.format_scene_path(scene_file, utils.OperatingSystem[os_flavor.lower()])
     application_params['projectData'] = maya_data
-    application_params['assetScript'] = client.file.generate_sas_url(maya_data, 'asset_map_{}.mel'.format(os_flavor.lower()))
+    application_params['assetScript'] = client.file.generate_sas_url(maya_data, 'asset_map.mel'.format(os_flavor.lower()))
     application_params['thumbScript'] = client.file.generate_sas_url(maya_data, 'generate_thumbnails.py')
     application_params['workspace'] = client.file.generate_sas_url(maya_data, 'workspace.mel')
     application_params['storageURL'] = generate_sas_token(storage_client, maya_data)
     application_params['frameStart'] = 1
     application_params['frameEnd'] = 3
     application_params['frameStep'] = 1
-    application_params['renderer'] = 'arnold'
+    application_params['renderer'] = 'sw'
 
     batch_parameters['poolInfo'] = pool_info
     new_job = client.job.jobparameter_from_json(batch_parameters)
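The hunk ends with the expanded job parameters; the step that typically follows is submitting the job and waiting for its tasks. A sketch of that continuation, assuming the standard azure-batch operations surface (client.job.add, client.task.list); the poll interval and state checks are assumptions, not part of the commit:

    import time

    # Hypothetical next step: submit the expanded job and wait for its tasks.
    client.job.add(new_job)

    while True:
        tasks = list(client.task.list(job_id))
        if tasks and all(task.state.value == 'completed' for task in tasks):
            break
        time.sleep(30)

    print('All tasks in job {} completed'.format(job_id))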