├── scripts
│   ├── modules
│   │   ├── __init__.py
│   │   ├── firehawk
│   │   │   ├── api
│   │   │   │   ├── __init__.py
│   │   │   │   ├── menu_visibility.py
│   │   │   │   ├── post_flight.py
│   │   │   │   ├── submission_attributes.py
│   │   │   │   ├── menu_functions.py
│   │   │   │   ├── startup_456.py
│   │   │   │   ├── timestamp_submit.py
│   │   │   │   ├── pdgkvstore.py
│   │   │   │   ├── submit_logging.py
│   │   │   │   ├── houpdgkvstore.py
│   │   │   │   ├── pre_flight.py
│   │   │   │   ├── output_prep.py
│   │   │   │   └── create_asset.py
│   │   │   ├── plugins
│   │   │   │   └── __init__.py
│   │   │   └── __init__.py
│   │   ├── TestPostTask.py
│   │   ├── mounts.py
│   │   ├── deadline_pre_job.py
│   │   ├── firehawk_parms.py
│   │   ├── trashcan.sh
│   │   ├── deadline_post_task.py
│   │   ├── firehawk_plugin_loader.py
│   │   ├── TestPreTask.py
│   │   ├── README
│   │   ├── firehawk_asset_handler.py
│   │   └── firehawk_read.py
│   ├── ffmpeg_batch
│   │   ├── batch_to_prores.sh
│   │   └── batch_to_quicktime.sh
│   ├── s3_sync
│   │   ├── s3_sync.py
│   │   └── s3_sync_shell.py
│   └── menus
│       └── OPmenu.xml
├── toolbar
│   ├── shelf_tool_assets.json
│   ├── openfirehawk
│   │   ├── node_dict.shelf
│   │   ├── top_versioning.shelf
│   │   ├── update_tops_versions.shelf
│   │   ├── find_locked_nodes.shelf
│   │   ├── openfirehawk.shelf
│   │   ├── save_hip.shelf
│   │   ├── replace_rop_inputs_with_boxes.shelf
│   │   ├── convert_selection_to_relative_paths.shelf
│   │   └── version_up_hip.shelf
│   └── default.shelf
├── hda
│   ├── Top_firehawkdeadlinescheduler_1.0.0.hda
│   │   ├── houdini.hdalibrary
│   │   ├── Top_1firehawkdeadlinescheduler
│   │   │   ├── Help
│   │   │   ├── Version
│   │   │   ├── DescriptiveParmName
│   │   │   ├── Sections.list
│   │   │   ├── TypePropertiesOptions
│   │   │   ├── CreateScript
│   │   │   ├── PythonModule
│   │   │   ├── Tools.shelf
│   │   │   ├── ExtraFileOptions
│   │   │   └── SyncNodeVersion
│   │   ├── Sections.list
│   │   └── INDEX__SECTION
│   ├── Top_firehawkdeadlinescheduler_2.0.0.hda
│   │   ├── houdini.hdalibrary
│   │   ├── Top_1firehawkdeadlinescheduler_8_82.0.0
│   │   │   ├── Help
│   │   │   ├── Version
│   │   │   ├── DescriptiveParmName
│   │   │   ├── Sections.list
│   │   │   ├── TypePropertiesOptions
│   │   │   ├── CreateScript
│   │   │   ├── PythonModule
│   │   │   ├── Tools.shelf
│   │   │   ├── ExtraFileOptions
│   │   │   └── SyncNodeVersion
│   │   ├── Sections.list
│   │   └── INDEX__SECTION
│   ├── Top_firehawklocalscheduler_1.0.0.hda
│   │   ├── houdini.hdalibrary
│   │   ├── Top_1firehawklocalscheduler
│   │   │   ├── Help
│   │   │   ├── Version
│   │   │   ├── DescriptiveParmName
│   │   │   ├── Sections.list
│   │   │   ├── SyncNodeVersion
│   │   │   ├── TypePropertiesOptions
│   │   │   ├── CreateScript
│   │   │   ├── ExtraFileOptions
│   │   │   ├── Tools.shelf
│   │   │   └── DialogScript
│   │   ├── Sections.list
│   │   └── INDEX__SECTION
│   ├── Top_mplay_0.4.0.hda
│   ├── Top_jobseqshot_1.0.0.hda
│   ├── Top_outputprep_0.7.0.hda
│   ├── Top_preflight_0.0.10.hda
│   ├── Sop_read_wedges_5.4.0.hda
│   ├── Sop_read_wedges_5.4.1.hda
│   ├── Sop_read_wedges_5.5.0.hda
│   ├── Top_firehawkformatprep_1.0.0.hda
│   ├── backup
│   │   ├── Top_outputprep_0.7.0_bak10.hda
│   │   ├── Top_outputprep_0.7.0_bak11.hda
│   │   ├── Top_outputprep_0.7.0_bak12.hda
│   │   ├── Top_outputprep_0.7.0_bak13.hda
│   │   ├── Top_outputprep_0.7.0_bak8.hda
│   │   ├── Top_outputprep_0.7.0_bak9.hda
│   │   ├── Sop_read_wedges_5.5.0_bak15.hda
│   │   ├── Sop_read_wedges_5.5.0_bak16.hda
│   │   ├── Sop_read_wedges_5.5.0_bak17.hda
│   │   ├── Sop_read_wedges_5.5.0_bak18.hda
│   │   ├── Sop_read_wedges_5.5.0_bak19.hda
│   │   ├── Sop_read_wedges_5.5.0_bak20.hda
│   │   └── Top_firehawklocalscheduler_1.0.0_bak1.hda
│   └── Top_firehawkdeadlinescheduler_2.0.0.hda.zip
├── example_hip
│   ├── firehawk.pdg.versioning.demo.hip
│   ├── firehawk.pdg.versioning.demo.py
│   └── firehawk.pdg.versioning.demo.sh
├── tests
│   └── pythonscript_dirname
│       ├── pythonscript_dirname.hip
│       └── description.md
├── .gitignore
├── 456.py
├── firehawk-pdg-tools.json
├── pdg
│   └── types
│       └── firehawkschedulers
│           ├── __init__.py
│           ├── firehawklocal.py
│           ├── custom_handlers.py
│           └── firehawktbdeadline.py
├── README.md
└── LICENSE
/scripts/modules/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/toolbar/shelf_tool_assets.json:
--------------------------------------------------------------------------------
1 | []
--------------------------------------------------------------------------------
/scripts/modules/firehawk/api/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/scripts/modules/firehawk/plugins/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_1.0.0.hda/houdini.hdalibrary:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_2.0.0.hda/houdini.hdalibrary:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/hda/Top_firehawklocalscheduler_1.0.0.hda/houdini.hdalibrary:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/hda/Top_firehawklocalscheduler_1.0.0.hda/Top_1firehawklocalscheduler/Help:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_1.0.0.hda/Top_1firehawkdeadlinescheduler/Help:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/hda/Top_firehawklocalscheduler_1.0.0.hda/Top_1firehawklocalscheduler/Version:
--------------------------------------------------------------------------------
1 | 1
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_1.0.0.hda/Top_1firehawkdeadlinescheduler/Version:
--------------------------------------------------------------------------------
1 | 2
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_2.0.0.hda/Top_1firehawkdeadlinescheduler_8_82.0.0/Help:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_2.0.0.hda/Top_1firehawkdeadlinescheduler_8_82.0.0/Version:
--------------------------------------------------------------------------------
1 | 2
--------------------------------------------------------------------------------
/hda/Top_firehawklocalscheduler_1.0.0.hda/Top_1firehawklocalscheduler/DescriptiveParmName:
--------------------------------------------------------------------------------
1 | pdg_workingdir
--------------------------------------------------------------------------------
/scripts/modules/firehawk/__init__.py:
--------------------------------------------------------------------------------
1 | __path__ = __import__('pkgutil').extend_path(__path__, __name__)
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_1.0.0.hda/Top_1firehawkdeadlinescheduler/DescriptiveParmName:
--------------------------------------------------------------------------------
1 | pdg_workingdir
--------------------------------------------------------------------------------
/hda/Top_mplay_0.4.0.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/Top_mplay_0.4.0.hda
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_2.0.0.hda/Top_1firehawkdeadlinescheduler_8_82.0.0/DescriptiveParmName:
--------------------------------------------------------------------------------
1 | pdg_workingdir
--------------------------------------------------------------------------------
/hda/Top_jobseqshot_1.0.0.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/Top_jobseqshot_1.0.0.hda
--------------------------------------------------------------------------------
/hda/Top_outputprep_0.7.0.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/Top_outputprep_0.7.0.hda
--------------------------------------------------------------------------------
/hda/Top_preflight_0.0.10.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/Top_preflight_0.0.10.hda
--------------------------------------------------------------------------------
/hda/Sop_read_wedges_5.4.0.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/Sop_read_wedges_5.4.0.hda
--------------------------------------------------------------------------------
/hda/Sop_read_wedges_5.4.1.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/Sop_read_wedges_5.4.1.hda
--------------------------------------------------------------------------------
/hda/Sop_read_wedges_5.5.0.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/Sop_read_wedges_5.5.0.hda
--------------------------------------------------------------------------------
/hda/Top_firehawkformatprep_1.0.0.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/Top_firehawkformatprep_1.0.0.hda
--------------------------------------------------------------------------------
/hda/backup/Top_outputprep_0.7.0_bak10.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/backup/Top_outputprep_0.7.0_bak10.hda
--------------------------------------------------------------------------------
/hda/backup/Top_outputprep_0.7.0_bak11.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/backup/Top_outputprep_0.7.0_bak11.hda
--------------------------------------------------------------------------------
/hda/backup/Top_outputprep_0.7.0_bak12.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/backup/Top_outputprep_0.7.0_bak12.hda
--------------------------------------------------------------------------------
/hda/backup/Top_outputprep_0.7.0_bak13.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/backup/Top_outputprep_0.7.0_bak13.hda
--------------------------------------------------------------------------------
/hda/backup/Top_outputprep_0.7.0_bak8.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/backup/Top_outputprep_0.7.0_bak8.hda
--------------------------------------------------------------------------------
/hda/backup/Top_outputprep_0.7.0_bak9.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/backup/Top_outputprep_0.7.0_bak9.hda
--------------------------------------------------------------------------------
/hda/backup/Sop_read_wedges_5.5.0_bak15.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/backup/Sop_read_wedges_5.5.0_bak15.hda
--------------------------------------------------------------------------------
/hda/backup/Sop_read_wedges_5.5.0_bak16.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/backup/Sop_read_wedges_5.5.0_bak16.hda
--------------------------------------------------------------------------------
/hda/backup/Sop_read_wedges_5.5.0_bak17.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/backup/Sop_read_wedges_5.5.0_bak17.hda
--------------------------------------------------------------------------------
/hda/backup/Sop_read_wedges_5.5.0_bak18.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/backup/Sop_read_wedges_5.5.0_bak18.hda
--------------------------------------------------------------------------------
/hda/backup/Sop_read_wedges_5.5.0_bak19.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/backup/Sop_read_wedges_5.5.0_bak19.hda
--------------------------------------------------------------------------------
/hda/backup/Sop_read_wedges_5.5.0_bak20.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/backup/Sop_read_wedges_5.5.0_bak20.hda
--------------------------------------------------------------------------------
/example_hip/firehawk.pdg.versioning.demo.hip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/example_hip/firehawk.pdg.versioning.demo.hip
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_2.0.0.hda.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/Top_firehawkdeadlinescheduler_2.0.0.hda.zip
--------------------------------------------------------------------------------
/tests/pythonscript_dirname/pythonscript_dirname.hip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/tests/pythonscript_dirname/pythonscript_dirname.hip
--------------------------------------------------------------------------------
/hda/backup/Top_firehawklocalscheduler_1.0.0_bak1.hda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/hda/backup/Top_firehawklocalscheduler_1.0.0_bak1.hda
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | example_hip/backup/*
3 | example_hip/output/*
4 | hda/backup/*
5 | tests/*/backup/*
6 | tests/*/output/*
7 |
8 | *.bgeo.sc
9 | *.jpg
10 | *_bak*.hip
11 |
--------------------------------------------------------------------------------
/hda/Top_firehawklocalscheduler_1.0.0.hda/Sections.list:
--------------------------------------------------------------------------------
1 | ""
2 | INDEX__SECTION INDEX_SECTION
3 | houdini.hdalibrary houdini.hdalibrary
4 | Top_1firehawklocalscheduler Top/firehawklocalscheduler
5 |
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_1.0.0.hda/Sections.list:
--------------------------------------------------------------------------------
1 | ""
2 | INDEX__SECTION INDEX_SECTION
3 | houdini.hdalibrary houdini.hdalibrary
4 | Top_1firehawkdeadlinescheduler Top/firehawkdeadlinescheduler
5 |
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_2.0.0.hda/Sections.list:
--------------------------------------------------------------------------------
1 | ""
2 | INDEX__SECTION INDEX_SECTION
3 | houdini.hdalibrary houdini.hdalibrary
4 | Top_1firehawkdeadlinescheduler_8_82.0.0 Top/firehawkdeadlinescheduler::2.0.0
5 |
--------------------------------------------------------------------------------
/scripts/modules/firehawk/api/menu_visibility.py:
--------------------------------------------------------------------------------
1 |
2 | def reload_pdg_visibility(kwargs):
3 |     import hou
4 |     node = kwargs["node"]
5 |     category = node.type().category().name()
6 |     if category != 'Top':
7 |         return False
8 |     return True
--------------------------------------------------------------------------------
/scripts/modules/firehawk/api/post_flight.py:
--------------------------------------------------------------------------------
1 | import firehawk_plugin_loader
2 | firehawk_logger = firehawk_plugin_loader.module_package('submit_logging').submit_logging.FirehawkLogger()
3 |
4 | def graph_complete(hip_name):
5 |     firehawk_logger.info('End Cooking Graph: {}'.format(hip_name))
--------------------------------------------------------------------------------
/example_hip/firehawk.pdg.versioning.demo.py:
--------------------------------------------------------------------------------
1 | # This file is used in the .sh script to test cooking with preflight.
2 |
3 | import hou
4 | node = hou.node('/obj/sop_geo_process/topnet1/output0')
5 |
6 | import firehawk_plugin_loader
7 | firehawk_plugin_loader.module_package('pre_flight').pre_flight.Preflight( node ).cook()
--------------------------------------------------------------------------------
/example_hip/firehawk.pdg.versioning.demo.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # This is an automated cook test.
4 |
5 | SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" # The directory of this script
6 |
7 | FH_VAR_DEBUG_PDG=10 hython "$SCRIPTDIR/firehawk.pdg.versioning.demo.hip" "$SCRIPTDIR/firehawk.pdg.versioning.demo.py"
--------------------------------------------------------------------------------
/toolbar/openfirehawk/node_dict.shelf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/toolbar/openfirehawk/node_dict.shelf
--------------------------------------------------------------------------------
/toolbar/openfirehawk/top_versioning.shelf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/toolbar/openfirehawk/top_versioning.shelf
--------------------------------------------------------------------------------
/456.py:
--------------------------------------------------------------------------------
1 | ### If you have more than one 456.py you will need to copy the contents of this into your main 456.py
2 |
3 | import os
4 | # Optional import
5 | firehawk_root = os.getenv("FIREHAWK_PDG_TOOLS_ROOT")
6 | if firehawk_root and os.path.isdir(firehawk_root):
7 |     import firehawk_plugin_loader
8 |     firehawk_plugin_loader.module_package('startup_456').startup_456.init()
--------------------------------------------------------------------------------
/toolbar/openfirehawk/update_tops_versions.shelf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/toolbar/openfirehawk/update_tops_versions.shelf
--------------------------------------------------------------------------------
/hda/Top_firehawklocalscheduler_1.0.0.hda/Top_1firehawklocalscheduler/Sections.list:
--------------------------------------------------------------------------------
1 | ""
2 | DialogScript DialogScript
3 | CreateScript CreateScript
4 | TypePropertiesOptions TypePropertiesOptions
5 | Help Help
6 | Tools.shelf Tools.shelf
7 | Version Version
8 | DescriptiveParmName DescriptiveParmName
9 | SyncNodeVersion SyncNodeVersion
10 | ExtraFileOptions ExtraFileOptions
11 |
--------------------------------------------------------------------------------
/hda/Top_firehawklocalscheduler_1.0.0.hda/Top_1firehawklocalscheduler/SyncNodeVersion:
--------------------------------------------------------------------------------
1 | node = kwargs['node']
2 | oldversion_str = kwargs['old_version']
3 |
4 | if oldversion_str == '':
5 |     oldversion = 0
6 | else:
7 |     oldversion = int(oldversion_str)
8 |
9 | if oldversion < 1:
10 |     # Old hip - set Mapping Mode to None
11 |     node.parm('pdg_mapmode').set(1)
12 |
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_1.0.0.hda/Top_1firehawkdeadlinescheduler/Sections.list:
--------------------------------------------------------------------------------
1 | ""
2 | DialogScript DialogScript
3 | CreateScript CreateScript
4 | TypePropertiesOptions TypePropertiesOptions
5 | Help Help
6 | Tools.shelf Tools.shelf
7 | Version Version
8 | DescriptiveParmName DescriptiveParmName
9 | PythonModule PythonModule
10 | SyncNodeVersion SyncNodeVersion
11 | ExtraFileOptions ExtraFileOptions
12 |
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_2.0.0.hda/Top_1firehawkdeadlinescheduler_8_82.0.0/Sections.list:
--------------------------------------------------------------------------------
1 | ""
2 | DialogScript DialogScript
3 | CreateScript CreateScript
4 | TypePropertiesOptions TypePropertiesOptions
5 | Help Help
6 | Tools.shelf Tools.shelf
7 | Version Version
8 | DescriptiveParmName DescriptiveParmName
9 | PythonModule PythonModule
10 | SyncNodeVersion SyncNodeVersion
11 | ExtraFileOptions ExtraFileOptions
12 |
--------------------------------------------------------------------------------
/hda/Top_firehawklocalscheduler_1.0.0.hda/Top_1firehawklocalscheduler/TypePropertiesOptions:
--------------------------------------------------------------------------------
1 | CheckExternal := 1;
2 | ContentsCompressionType := 1;
3 | ForbidOutsideParms := 1;
4 | GzipContents := 1;
5 | LockContents := 1;
6 | MakeDefault := 1;
7 | ParmsFromVfl := 0;
8 | PrefixDroppedParmLabel := 0;
9 | PrefixDroppedParmName := 0;
10 | SaveCachedCode := 0;
11 | SaveIcon := 1;
12 | SaveSpareParms := 0;
13 | UnlockOnCreate := 0;
14 | UseDSParms := 1;
15 |
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_1.0.0.hda/Top_1firehawkdeadlinescheduler/TypePropertiesOptions:
--------------------------------------------------------------------------------
1 | CheckExternal := 1;
2 | ContentsCompressionType := 1;
3 | ForbidOutsideParms := 1;
4 | GzipContents := 1;
5 | LockContents := 1;
6 | MakeDefault := 1;
7 | ParmsFromVfl := 0;
8 | PrefixDroppedParmLabel := 0;
9 | PrefixDroppedParmName := 0;
10 | SaveCachedCode := 0;
11 | SaveIcon := 1;
12 | SaveSpareParms := 0;
13 | UnlockOnCreate := 0;
14 | UseDSParms := 1;
15 |
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_2.0.0.hda/Top_1firehawkdeadlinescheduler_8_82.0.0/TypePropertiesOptions:
--------------------------------------------------------------------------------
1 | CheckExternal := 1;
2 | ContentsCompressionType := 1;
3 | ForbidOutsideParms := 1;
4 | GzipContents := 1;
5 | LockContents := 1;
6 | MakeDefault := 1;
7 | ParmsFromVfl := 0;
8 | PrefixDroppedParmLabel := 0;
9 | PrefixDroppedParmName := 0;
10 | SaveCachedCode := 0;
11 | SaveIcon := 1;
12 | SaveSpareParms := 0;
13 | UnlockOnCreate := 0;
14 | UseDSParms := 1;
15 |
--------------------------------------------------------------------------------
/hda/Top_firehawklocalscheduler_1.0.0.hda/Top_1firehawklocalscheduler/CreateScript:
--------------------------------------------------------------------------------
1 | # Automatically generated script
2 | \set noalias = 1
3 | #
4 | # Creation script for firehawklocalscheduler operator
5 | #
6 |
7 | if ( "$arg1" == "" ) then
8 |     echo This script is intended as a creation script
9 |     exit
10 | endif
11 |
12 | # Node $arg1 (Top/firehawklocalscheduler)
13 | opexprlanguage -s hscript $arg1
14 | opuserdata -n '___Version___' -v '1' $arg1
15 |
--------------------------------------------------------------------------------
/scripts/modules/TestPostTask.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import traceback
4 | import json
5 |
6 | from Deadline.Scripting import *
7 | from Deadline.Plugins import *
8 |
9 | def __main__( *args ):
10 |     deadlinePlugin = args[0]
11 |     job = deadlinePlugin.GetJob()
12 |
13 |     deadlinePlugin.LogInfo("In Test Post Task!")
14 |
15 |     task = deadlinePlugin.GetCurrentTask()
16 |
17 |
18 |     deadlinePlugin.LogInfo("Finished Test Post Task!")
19 |
20 |
--------------------------------------------------------------------------------
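For context, a hedged sketch of how scripts like TestPostTask.py and TestPreTask.py are typically wired up: Deadline reads pre/post task script paths from the job info file at submission time. Every path and job value below is hypothetical.

```python
# Hypothetical submission-side sketch: write a Deadline job info file that
# attaches the pre/post task scripts above. Paths are placeholders.
job_info = {
    "Plugin": "Houdini",
    "Name": "example_job",                                        # hypothetical job name
    "PreTaskScript": "/path/to/scripts/modules/TestPreTask.py",   # hypothetical path
    "PostTaskScript": "/path/to/scripts/modules/TestPostTask.py", # hypothetical path
}
with open("/tmp/job_info.job", "w") as f:
    for key, value in job_info.items():
        f.write("{}={}\n".format(key, value))
# The file would then be passed to deadlinecommand along with a plugin info file.
```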
/hda/Top_firehawkdeadlinescheduler_1.0.0.hda/Top_1firehawkdeadlinescheduler/CreateScript:
--------------------------------------------------------------------------------
1 | # Automatically generated script
2 | \set noalias = 1
3 | #
4 | # Creation script for firehawkdeadlinescheduler operator
5 | #
6 |
7 | if ( "$arg1" == "" ) then
8 |     echo This script is intended as a creation script
9 |     exit
10 | endif
11 |
12 | # Node $arg1 (Top/firehawkdeadlinescheduler)
13 | opexprlanguage -s hscript $arg1
14 | opuserdata -n '___Version___' -v '2' $arg1
15 |
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_2.0.0.hda/Top_1firehawkdeadlinescheduler_8_82.0.0/CreateScript:
--------------------------------------------------------------------------------
1 | # Automatically generated script
2 | \set noalias = 1
3 | #
4 | # Creation script for firehawkdeadlinescheduler::2.0.0 operator
5 | #
6 |
7 | if ( "$arg1" == "" ) then
8 |     echo This script is intended as a creation script
9 |     exit
10 | endif
11 |
12 | # Node $arg1 (Top/firehawkdeadlinescheduler::2.0.0)
13 | opexprlanguage -s hscript $arg1
14 | opuserdata -n '___Version___' -v '2' $arg1
15 |
--------------------------------------------------------------------------------
/hda/Top_firehawklocalscheduler_1.0.0.hda/INDEX__SECTION:
--------------------------------------------------------------------------------
1 | Operator: firehawklocalscheduler
2 | Label: Firehawk Local Scheduler
3 | Path: oplib:/Top/firehawklocalscheduler?Top/firehawklocalscheduler
4 | Icon: TOP_localscheduler
5 | Table: Top
6 | License:
7 | Extra: pdgtype='scheduler' subtype=firehawklocalscheduler
8 | User:
9 | Inputs: 0 to 0
10 | Outputs: 0
11 | Subnet: false
12 | Python: false
13 | Empty: false
14 | Modified: Sat Jun 26 17:37:51 2021
15 |
16 |
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_1.0.0.hda/INDEX__SECTION:
--------------------------------------------------------------------------------
1 | Operator: firehawkdeadlinescheduler
2 | Label: Firehawk Deadline Scheduler
3 | Path: oplib:/Top/firehawkdeadlinescheduler?Top/firehawkdeadlinescheduler
4 | Icon: TOP_deadlinescheduler
5 | Table: Top
6 | License:
7 | Extra: pdgtype='scheduler' subtype=firehawkdeadlinescheduler
8 | User:
9 | Inputs: 0 to 0
10 | Outputs: 0
11 | Subnet: false
12 | Python: false
13 | Empty: false
14 | Modified: Mon Oct 18 15:20:13 2021
15 |
16 |
--------------------------------------------------------------------------------
/scripts/ffmpeg_batch/batch_to_prores.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Convert a bunch of mov / mp4 files in the current dir to ProRes format for editing in an NLE (FCP X, DaVinci Resolve, etc).
4 | # Not great for large numbers of files, since it will max out cores.
5 | # Consider this post for thread limiting: https://gist.github.com/Brainiarc7/2afac8aea75f4e01d7670bc2ff1afad1
6 |
7 | shopt -s nullglob;
8 | for file in *.{mov,mp4,MOV,MP4} ; do
9 |     echo "convert $file"
10 |     ffmpeg -i "$file" -vcodec prores -acodec pcm_s16le "prores_${file%.*}.mov" > /dev/null 2>&1 &  # discard output and background the job so files convert in parallel
11 | done
--------------------------------------------------------------------------------
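Usage is simply to run the script from the directory holding the footage (the path below is hypothetical):

```bash
cd /path/to/footage      # hypothetical directory containing mov/mp4 files
./batch_to_prores.sh     # each file converts in a backgrounded ffmpeg process
```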
/hda/Top_firehawkdeadlinescheduler_2.0.0.hda/INDEX__SECTION:
--------------------------------------------------------------------------------
1 | Operator: firehawkdeadlinescheduler::2.0.0
2 | Label: Firehawk Deadline Scheduler
3 | Path: oplib:/Top/firehawkdeadlinescheduler::2.0.0?Top/firehawkdeadlinescheduler::2.0.0
4 | Icon: TOP_deadlinescheduler
5 | Table: Top
6 | License:
7 | Extra: pdgtype='scheduler' subtype=firehawkdeadlinescheduler visibleoutputs=0
8 | User:
9 | Inputs: 0 to 0
10 | Outputs: 0
11 | Subnet: false
12 | Python: false
13 | Empty: false
14 | Modified: Tue Oct 10 22:24:12 2023
15 |
16 |
--------------------------------------------------------------------------------
/scripts/modules/firehawk/api/submission_attributes.py:
--------------------------------------------------------------------------------
1 | # Work items can attach attribute values to the submitObject.ord_dict, a plain dictionary that may be used in a submission pipeline.
2 |
3 | def get_submission_attributes():
4 |     submission_attributes = {
5 |         'job': {'var_data_type': 'string' },
6 |         'asset_type': {'var_data_type': 'string' },
7 |         'format': {'var_data_type': 'string' },
8 |         'volatile': {'var_data_type': 'int' },
9 |         'res': {'var_data_type': 'string' },
10 |         'animating_frames': {'var_data_type': 'string' }
11 |     }
12 |     return submission_attributes
13 |
14 |
15 |
--------------------------------------------------------------------------------
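A minimal sketch of how a submission pipeline might consume these declarations, given an ord_dict as described in the module's comment. A direct import is shown for brevity; the codebase itself resolves api modules through firehawk_plugin_loader. The dictionary contents are hypothetical.

```python
from firehawk.api.submission_attributes import get_submission_attributes

def collect_attributes(ord_dict):
    # Pull only the declared attribute names out of a submission's ord_dict,
    # leaving None for anything the work item didn't attach.
    return {name: ord_dict.get(name) for name in get_submission_attributes()}

print(collect_attributes({'job': 'myjob', 'volatile': 0}))  # hypothetical values
```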
/hda/Top_firehawkdeadlinescheduler_1.0.0.hda/Top_1firehawkdeadlinescheduler/PythonModule:
--------------------------------------------------------------------------------
1 | def submitGraphAsJob(kwargs, node):
2 |     from pdg.scheduler import submitGraphAsJob
3 |     submitGraphAsJob(kwargs, node, 'Deadline')
4 |
5 | def validateForceReloadPlugin(kwargs, node):
6 |     """
7 |     When PDGMQ Server As Task is False, also set
8 |     the Force Reload Plugin to False.
9 |     """
10 |     mq_server_task = node.parm('deadline_pdgmqastask').eval()
11 |     force_reload_plugin = node.parm('deadline_forcereloadplugin').eval()
12 |     if force_reload_plugin == 1 and mq_server_task == 0:
13 |         node.parm('deadline_forcereloadplugin').set(0)
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_2.0.0.hda/Top_1firehawkdeadlinescheduler_8_82.0.0/PythonModule:
--------------------------------------------------------------------------------
1 | def submitGraphAsJob(kwargs, node):
2 |     from pdg.scheduler import submitGraphAsJob
3 |     submitGraphAsJob(kwargs, node, 'Deadline')
4 |
5 | def validateForceReloadPlugin(kwargs, node):
6 |     """
7 |     When PDGMQ Server As Task is False, also set
8 |     the Force Reload Plugin to False.
9 |     """
10 |     mq_server_task = node.parm('deadline_pdgmqastask').eval()
11 |     force_reload_plugin = node.parm('deadline_forcereloadplugin').eval()
12 |     if force_reload_plugin == 1 and mq_server_task == 0:
13 |         node.parm('deadline_forcereloadplugin').set(0)
--------------------------------------------------------------------------------
/scripts/modules/mounts.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | import os
4 |
5 | class mounts():
6 |     def __init__(self):
7 |         self.mount_paths = {}
8 |
9 |     def get_mounts(self):
10 |         mount_paths = {}
11 |         for l in open('/proc/mounts'):
12 |             if l[0] == '/':
13 |                 l_split = l.split()
14 |                 mount_paths[l_split[1]] = l_split[0]
15 |             elif 'nfs4' in l:
16 |                 l_split = l.split()
17 |                 mount_paths[l_split[1]] = l_split[0]
18 |         return mount_paths
19 |
20 |     def check_mounted(self, path):
21 |         mounted = False
22 |         if path in self.get_mounts():
23 |             mounted = True
24 |         return mounted
--------------------------------------------------------------------------------
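A quick usage sketch (the mount point is hypothetical):

```python
from mounts import mounts

m = mounts()
print(m.get_mounts())            # e.g. {'/prod': 'nfsserver:/prod', ...} parsed from /proc/mounts
print(m.check_mounted('/prod'))  # True only if '/prod' is an exact mount point
```

Note that this relies on /proc/mounts, so it only works on Linux hosts.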
/toolbar/openfirehawk/find_locked_nodes.shelf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/toolbar/openfirehawk/find_locked_nodes.shelf
--------------------------------------------------------------------------------
/toolbar/openfirehawk/openfirehawk.shelf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/toolbar/openfirehawk/openfirehawk.shelf
--------------------------------------------------------------------------------
/hda/Top_firehawklocalscheduler_1.0.0.hda/Top_1firehawklocalscheduler/ExtraFileOptions:
--------------------------------------------------------------------------------
1 | {
2 |     "SyncNodeVersion/Cursor":{
3 |         "type":"intarray",
4 |         "value":[10,1]
5 |     },
6 |     "SyncNodeVersion/IsExpr":{
7 |         "type":"bool",
8 |         "value":false
9 |     },
10 |     "SyncNodeVersion/IsPython":{
11 |         "type":"bool",
12 |         "value":true
13 |     },
14 |     "SyncNodeVersion/IsScript":{
15 |         "type":"bool",
16 |         "value":true
17 |     },
18 |     "SyncNodeVersion/Source":{
19 |         "type":"string",
20 |         "value":""
21 |     },
22 |     "ViewerStateModule/CodeGenInput":{
23 |         "type":"string",
24 |         "value":"{\n\t\"state_name\":\"\",\n\t\"state_label\":\"\",\n\t\"state_descr\":\"\",\n\t\"state_icon\":\"$HH/config/Icons\",\n\t\"state_debug_menu\":0,\n\t\"state_sample\":0,\n\t\"state_handler_indices\":[]\n}\n"
25 |     }
26 | }
27 |
--------------------------------------------------------------------------------
/scripts/modules/deadline_pre_job.py:
--------------------------------------------------------------------------------
1 | print('PRE JOB SCRIPT')
2 |
3 | import logging
4 |
5 |
6 | formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
7 |
8 | def setup_logger(name, log_file, level=logging.INFO):
9 |     """Set up as many named file loggers as you want."""
10 |
11 |     handler = logging.FileHandler(log_file)
12 |     handler.setFormatter(formatter)
13 |
14 |     logger = logging.getLogger(name)
15 |     logger.setLevel(level)
16 |     if not logger.handlers: # avoid duplicate handlers when the same logger name is requested twice
17 |         logger.addHandler(handler)
18 |
19 |     return logger
20 |
21 | def __main__(one, two):
22 |     print('main pre job')
23 |     print(dir(one))
24 |     print(dir(two))
25 |     # first file logger
26 |     logger = setup_logger('first_logger', '/var/tmp/logfile_pre_job.log')
27 |     logger.info('PRE JOB SCRIPT')
28 |
29 | logger = setup_logger('first_logger', '/var/tmp/logfile_pre_job.log')
30 | logger.info('PRE JOB SCRIPT OUT OF MAIN')
--------------------------------------------------------------------------------
/hda/Top_firehawklocalscheduler_1.0.0.hda/Top_1firehawklocalscheduler/Tools.shelf:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <shelfDocument>
3 |   <!-- This file contains definitions of shelves, toolbars, and tools.
4 |  It should not be hand-edited when it is being used by the application.
5 |  Note, that two definitions of the same element are not allowed in
6 |  a single file. -->
7 |
8 |   <tool name="$HDA_DEFAULT_TOOL" label="$HDA_LABEL" icon="$HDA_ICON">
9 |     <toolMenuContext name="viewer">
10 |       <contextNetType>TOP</contextNetType>
11 |     </toolMenuContext>
12 |     <toolMenuContext name="network">
13 |       <contextOpType>$HDA_TABLE_AND_NAME</contextOpType>
14 |     </toolMenuContext>
15 |     <toolSubmenu>Schedulers</toolSubmenu>
16 |     <script scriptType="python"><![CDATA[import toptoolutils
17 |
18 | toptoolutils.genericTool(kwargs, '$HDA_NAME')]]></script>
19 |   </tool>
20 | </shelfDocument>
21 |
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_1.0.0.hda/Top_1firehawkdeadlinescheduler/Tools.shelf:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <shelfDocument>
3 |   <!-- This file contains definitions of shelves, toolbars, and tools.
4 |  It should not be hand-edited when it is being used by the application.
5 |  Note, that two definitions of the same element are not allowed in
6 |  a single file. -->
7 |
8 |   <tool name="$HDA_DEFAULT_TOOL" label="$HDA_LABEL" icon="$HDA_ICON">
9 |     <toolMenuContext name="viewer">
10 |       <contextNetType>TOP</contextNetType>
11 |     </toolMenuContext>
12 |     <toolMenuContext name="network">
13 |       <contextOpType>$HDA_TABLE_AND_NAME</contextOpType>
14 |     </toolMenuContext>
15 |     <toolSubmenu>Schedulers</toolSubmenu>
16 |     <script scriptType="python"><![CDATA[import toptoolutils
17 |
18 | toptoolutils.genericTool(kwargs, '$HDA_NAME')]]></script>
19 |   </tool>
20 | </shelfDocument>
21 |
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_2.0.0.hda/Top_1firehawkdeadlinescheduler_8_82.0.0/Tools.shelf:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <shelfDocument>
3 |   <!-- This file contains definitions of shelves, toolbars, and tools.
4 |  It should not be hand-edited when it is being used by the application.
5 |  Note, that two definitions of the same element are not allowed in
6 |  a single file. -->
7 |
8 |   <tool name="$HDA_DEFAULT_TOOL" label="$HDA_LABEL" icon="$HDA_ICON">
9 |     <toolMenuContext name="viewer">
10 |       <contextNetType>TOP</contextNetType>
11 |     </toolMenuContext>
12 |     <toolMenuContext name="network">
13 |       <contextOpType>$HDA_TABLE_AND_NAME</contextOpType>
14 |     </toolMenuContext>
15 |     <toolSubmenu>Schedulers</toolSubmenu>
16 |     <script scriptType="python"><![CDATA[import toptoolutils
17 |
18 | toptoolutils.genericTool(kwargs, '$HDA_NAME')]]></script>
19 |   </tool>
20 | </shelfDocument>
21 |
--------------------------------------------------------------------------------
/firehawk-pdg-tools.json:
--------------------------------------------------------------------------------
1 | {
2 |     "env" : [
3 |         {
4 |             "FIREHAWK_PDG_TOOLS" : "$HOUDINI_PACKAGE_PATH/firehawk-pdg-tools"
5 |         },
6 |         {
7 |             "PYTHONPATH" : {
8 |                 "method" : "append",
9 |                 "value" : ["$FIREHAWK_PDG_TOOLS/scripts/modules"]
10 |             }
11 |         },
12 |         {
13 |             "HOUDINI_SCRIPT_PATH" : {
14 |                 "method" : "prepend",
15 |                 "value" : ["$FIREHAWK_PDG_TOOLS"]
16 |             }
17 |         },
18 |         {
19 |             "HOUDINI_PATH" : {
20 |                 "method" : "prepend",
21 |                 "value" : ["$FIREHAWK_PDG_TOOLS"]
22 |             }
23 |         },
24 |         {
25 |             "HOUDINI_MENU_PATH" : {
26 |                 "method" : "prepend",
27 |                 "value" : ["$FIREHAWK_PDG_TOOLS/scripts/menus"]
28 |             }
29 |         },
30 |         {
31 |             "HOUDINI_OTLSCAN_PATH" : {
32 |                 "method" : "prepend",
33 |                 "value" : ["$FIREHAWK_PDG_TOOLS/hda"]
34 |             }
35 |         }
36 |     ]
37 | }
--------------------------------------------------------------------------------
/scripts/modules/firehawk/api/menu_functions.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | def reload_pdg():
4 |     print('...Reload PDG modules')
5 |     import pdg
6 |
7 |     types = pdg.TypeRegistry.types()
8 |     rt = types.registeredType( pdg.registeredType.Scheduler, "localscheduler" )
9 |     rt.reload()
10 |     # rt = types.registeredType( pdg.registeredType.Scheduler, "firehawklocalscheduler" )
11 |     # rt.reload()
12 |
13 |     import firehawk_submit as firehawk_submit
14 |     from importlib import reload
15 |     reload( firehawk_submit )
16 |     import firehawk_dynamic_versions as firehawk_dynamic_versions
17 |     reload( firehawk_dynamic_versions )
18 |     # import firehawk.plugins
19 |     # reload(firehawk.plugins)
20 |     # import firehawk.api
21 |     # reload(firehawk.api)
22 |
23 |     # import firehawk_plugin_loader
24 |     # output_prep = firehawk_plugin_loader.module_package('output_prep')
25 |     # reload(output_prep)
26 |
27 |     # import firehawk_plugin_loader
28 |     # import firehawk.plugins
29 |     # import firehawk.api
30 |     # plugin_modules, api_modules = firehawk_plugin_loader.load_plugins(reload_module=True) # load modules in the firehawk.api namespace
31 |
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_1.0.0.hda/Top_1firehawkdeadlinescheduler/ExtraFileOptions:
--------------------------------------------------------------------------------
1 | {
2 |     "PythonModule/Cursor":{
3 |         "type":"intarray",
4 |         "value":[5,1]
5 |     },
6 |     "PythonModule/IsExpr":{
7 |         "type":"bool",
8 |         "value":false
9 |     },
10 |     "PythonModule/IsPython":{
11 |         "type":"bool",
12 |         "value":true
13 |     },
14 |     "PythonModule/IsScript":{
15 |         "type":"bool",
16 |         "value":true
17 |     },
18 |     "PythonModule/Source":{
19 |         "type":"string",
20 |         "value":""
21 |     },
22 |     "SyncNodeVersion/Cursor":{
23 |         "type":"intarray",
24 |         "value":[71,88]
25 |     },
26 |     "SyncNodeVersion/IsExpr":{
27 |         "type":"bool",
28 |         "value":false
29 |     },
30 |     "SyncNodeVersion/IsPython":{
31 |         "type":"bool",
32 |         "value":true
33 |     },
34 |     "SyncNodeVersion/IsScript":{
35 |         "type":"bool",
36 |         "value":true
37 |     },
38 |     "SyncNodeVersion/Source":{
39 |         "type":"string",
40 |         "value":""
41 |     },
42 |     "ViewerStateModule/CodeGenInput":{
43 |         "type":"string",
44 |         "value":"{\n\t\"state_name\":\"\",\n\t\"state_label\":\"\",\n\t\"state_descr\":\"\",\n\t\"state_icon\":\"$HH/config/Icons\",\n\t\"state_debug_menu\":0,\n\t\"state_sample\":0,\n\t\"state_handler_indices\":[]\n}\n"
45 |     }
46 | }
47 |
--------------------------------------------------------------------------------
/scripts/ffmpeg_batch/batch_to_quicktime.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Convert a bunch of mov / mp4 files in the current dir to QuickTime-compatible mp4.
4 | # Not great for large numbers of files (it will max out ram/cores), but it gets the job done quicker than one by one, and it's great if you have a machine with a high core count and gobs of ram.
5 | # Consider this post for thread limiting: https://gist.github.com/Brainiarc7/2afac8aea75f4e01d7670bc2ff1afad1
6 |
7 | # Pass the pattern to match as a quoted string, eg: ./batch_to_quicktime.sh '*.mp4'
8 |
9 | shopt -s nullglob;
10 |
11 | match=$1
12 |
13 | list () {
14 |     for file in $match ; do
15 |         echo "will convert ./$file"
16 |     done
17 | }
18 |
19 | convert () {
20 |     echo 'converting files in background'
21 |     mkdir -p mp4
22 |     for file in $match ; do
23 |         echo "convert ./$file"
24 |         ffmpeg -i "$file" -vcodec h264 -acodec aac -pix_fmt yuv420p "mp4_${file%.*}.mp4" > /dev/null 2>&1 &
25 |         # output is discarded and the job backgrounded so multiple files convert in parallel
26 |     done
27 | }
28 |
29 | list
30 |
31 | echo "Do you wish to batch convert the listed files?"
32 | select yn in "Yes" "No"; do
33 |     case $yn in
34 |         Yes ) convert; break;;
35 |         No ) exit;;
36 |     esac
37 | done
--------------------------------------------------------------------------------
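Example session (the directory is hypothetical); the pattern must be quoted so the script, not the calling shell, expands it:

```bash
cd /path/to/footage                 # hypothetical directory of source files
./batch_to_quicktime.sh '*.mp4'     # lists matches, then asks before converting
```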
/toolbar/default.shelf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/toolbar/default.shelf
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_2.0.0.hda/Top_1firehawkdeadlinescheduler_8_82.0.0/ExtraFileOptions:
--------------------------------------------------------------------------------
1 | {
2 |     "PythonModule/Cursor":{
3 |         "type":"intarray",
4 |         "value":[5,1]
5 |     },
6 |     "PythonModule/IsExpr":{
7 |         "type":"bool",
8 |         "value":false
9 |     },
10 |     "PythonModule/IsPython":{
11 |         "type":"bool",
12 |         "value":true
13 |     },
14 |     "PythonModule/IsScript":{
15 |         "type":"bool",
16 |         "value":true
17 |     },
18 |     "PythonModule/Source":{
19 |         "type":"string",
20 |         "value":""
21 |     },
22 |     "SyncNodeVersion/Cursor":{
23 |         "type":"intarray",
24 |         "value":[71,88]
25 |     },
26 |     "SyncNodeVersion/IsExpr":{
27 |         "type":"bool",
28 |         "value":false
29 |     },
30 |     "SyncNodeVersion/IsPython":{
31 |         "type":"bool",
32 |         "value":true
33 |     },
34 |     "SyncNodeVersion/IsScript":{
35 |         "type":"bool",
36 |         "value":true
37 |     },
38 |     "SyncNodeVersion/Source":{
39 |         "type":"string",
40 |         "value":""
41 |     },
42 |     "ViewerHandleModule/CodeGenInput":{
43 |         "type":"string",
44 |         "value":"{\n\t\"handle_name\":\"\",\n\t\"handle_label\":\"\",\n\t\"handle_descr\":\"\",\n\t\"handle_icon\":\"$HH/config/Icons\",\n\t\"handle_debug_menu\":0,\n\t\"handle_sample\":0,\n\t\"handle_handler_indices\":[]\n}\n"
45 |     },
46 |     "ViewerStateModule/CodeGenInput":{
47 |         "type":"string",
48 |         "value":"{\n\t\"state_name\":\"\",\n\t\"state_label\":\"\",\n\t\"state_descr\":\"\",\n\t\"state_icon\":\"$HH/config/Icons\",\n\t\"state_debug_menu\":0,\n\t\"state_sample\":0,\n\t\"state_handler_indices\":[]\n}\n"
49 |     }
50 | }
51 |
--------------------------------------------------------------------------------
/scripts/modules/firehawk_parms.py:
--------------------------------------------------------------------------------
1 | def output_nodes_kv(): # To avoid hou calls in python generate blocks, we construct a dictionary with the required info to aid stability.
2 |     import json
3 |     import hou
4 |
5 |     parent = hou.pwd().parent()
6 |
7 |     kv = {}
8 |     for parm in parent.parm('output_nodes0').multiParmInstances():
9 |         for node in parm.evalAsNodes():
10 |
11 |             child_rops = [ x for x in node.children() if x.type().name() == 'ropfetch' ]
12 |
13 |             override_target = node.path()
14 |             override_target_type_name = node.type().name()
15 |             if len(child_rops) > 0: # if there is a rop nested, target that instead
16 |                 override_target = child_rops[0].path()
17 |                 override_target_type_name = child_rops[0].type().name()
18 |
19 |             new_kv = {
20 |                 node.path() : {
21 |                     "output_topnode_path" : node.path(), # this may be a parent node containing others, ie a mantra top
22 |                     "output_topnode_type_name" : node.type().name(),
23 |                     "child_rops" : [ x.path() for x in child_rops ], # store paths so the dict stays json serialisable
24 |                     "override_target" : override_target, # the actual node doing the workload, ie the rop fetch inside a mantra top
25 |                     "override_target_type_name" : override_target_type_name,
26 |                     "override_origin_path" : hou.pwd().path()
27 |                 }
28 |             }
29 |             kv.update( new_kv )
30 |
31 |     return json.dumps(kv)
--------------------------------------------------------------------------------
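For illustration, the value returned by output_nodes_kv() for a single mantra TOP with a nested ropfetch might decode to the sketch below; all paths are hypothetical.

```python
# Hypothetical shape of json.loads(output_nodes_kv()):
example = {
    "/obj/topnet1/mantra1": {
        "output_topnode_path": "/obj/topnet1/mantra1",
        "output_topnode_type_name": "mantra",
        "child_rops": ["/obj/topnet1/mantra1/ropfetch1"],
        "override_target": "/obj/topnet1/mantra1/ropfetch1",   # the nested rop does the workload
        "override_target_type_name": "ropfetch",
        "override_origin_path": "/obj/topnet1/scheduler"       # node the dict was built from
    }
}
```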
/scripts/modules/firehawk/api/startup_456.py:
--------------------------------------------------------------------------------
1 | def init():
2 |     print("Running firehawk startup_456.py")
3 |     import os
4 |     import hou
5 |     import time
6 |     import traceback
7 |
8 |     import firehawk_plugin_loader
9 |     debug_default = firehawk_plugin_loader.resolve_debug_default()
10 |
11 |     if debug_default: print( '456.py: ...Pull versions from sidecar object to multiparms.' )
12 |     try:
13 |         import firehawk_dynamic_versions
14 |         # pull_all_versions_to_all_multiparms will search for an attribute (or optionally a sidecar .json file matching the name of the hip file). If a match is found, the data contained allows parameters to be updated on load.
15 |         # This allows dynamically created versions to be updated to the versions current to the scheduler at the time the hip is loaded elsewhere, like on farm processes.
16 |         # For example, if 10 wedges are on an input and they all had new versions in this submission, then the output would need to know all 10 wedge versions if they were to be combined, so we load those versions as current as possible.
17 |         use_json_file = hou.isUIAvailable()
18 |         firehawk_dynamic_versions.versions().pull_all_versions_to_all_multiparms( check_hip_matches_submit=True, use_json_file=use_json_file ) # Only update multiparms if this is a farm submission hip file. If no work item resolves, use the json file to pull versions.
19 |         if debug_default: print( '456.py: Done.' )
20 |     except:
21 |         traceback.print_exc()
22 |         if debug_default: print('No PDG dynamic versions were imported')
--------------------------------------------------------------------------------
/pdg/types/firehawkschedulers/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | from . import firehawklocal
4 | from . import custom_handlers
5 | from . import firehawktbdeadline
6 |
7 | import firehawk_plugin_loader
8 | firehawk_logger = firehawk_plugin_loader.module_package('submit_logging').submit_logging.FirehawkLogger()
9 |
10 | import pdg
11 | exceptions = []
12 | tag_list = pdg.TypeRegistry.types().tags
13 |
14 | def registerTypes(type_registry):
15 | print("Register types for Firehawk Schedulers...")
16 |
17 | for tag in tag_list:
18 | if tag in exceptions:
19 | firehawk_logger.debug('Simple handler for tag {}'.format(tag))
20 | type_registry.registerCacheHandler(tag, custom_handlers.simple_handler)
21 | else:
22 | firehawk_logger.debug('Custom handler for tag {}'.format(tag))
23 | type_registry.registerCacheHandler(tag, custom_handlers.custom_handler)
24 |
25 | firehawk_logger.debug("Registering firehawklocalscheduler for H18.5")
26 | type_registry.registerScheduler(firehawklocal.FirehawkLocalScheduler, label="Firehawk Local Scheduler")
27 | firehawk_logger.debug("Registering firehawkdeadlinescheduler for H18.5")
28 | type_registry.registerScheduler(firehawktbdeadline.FirehawkDeadlineScheduler, label="Firehawk Deadline Scheduler")
29 | firehawk_logger.debug("Done registering firehawk schedulers.")
30 | firehawk_logger.debug("Registering script viewer")
31 | type_registry.addTag("file/firehawk/log")
32 | type_registry.addExtensionTag(".sh", "file/firehawk/log")
33 | # type_registry.addTagViewer("file/firehawk/log", "pluma") # Tested pluma/nedit. No vscode. TODO: find suitable handling for os
--------------------------------------------------------------------------------
/toolbar/openfirehawk/save_hip.shelf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/toolbar/openfirehawk/save_hip.shelf
--------------------------------------------------------------------------------
/scripts/modules/firehawk/api/timestamp_submit.py:
--------------------------------------------------------------------------------
1 |
2 | # Modules that are called to timestamp a hip file for submission
3 |
4 | import os
5 | import datetime as dt
6 |
7 | import firehawk_plugin_loader
8 | firehawk_logger = firehawk_plugin_loader.module_package('submit_logging').submit_logging.FirehawkLogger(debug=10)
9 |
10 | def datetime_object_str_to_object(datetime_object_str): # return string as datetime object
11 |     datetime_object = dt.datetime.strptime(datetime_object_str, "%Y-%m-%d %H:%M:%S.%f") # ensure the dt object successfully converts to string and back for consistency.
12 |     return datetime_object
13 |
14 | def update_timestamp(): # return datetime object as string
15 |     datetime_object = dt.datetime.now()
16 |     datetime_object_str = str( datetime_object )
17 |     datetime_object = datetime_object_str_to_object(datetime_object_str) # ensure the string safely converts back to a datetime object, since it will be read from user data.
18 |     return datetime_object, datetime_object_str
19 |
20 | def ensure_dir_exists( directory ):
21 |     if not os.path.isdir( directory ):
22 |         ### Need to create dir here
23 |         firehawk_logger.debugLog( "Creating Directory: {}".format( directory ) )
24 |         os.makedirs( directory )
25 |         if not os.path.isdir( directory ): # check that it actually got created
26 |             firehawk_logger.debugLog( "Error creating dir: {}".format( directory ) )
27 |     return
28 |
29 | def timestamp_submission_str(datetime_object, dir, hip_basename): # return a timestamped hip file and a custom formatted string from the timestamp
30 |
31 |     timestamp_str = datetime_object.strftime("%Y-%m-%d.%H-%M-%S-%f") # append a time of submission, with microseconds.
32 |
33 |     # hip_name = "{dir}/submit/{base}.{date}.hip".format( dir=dir, base=hip_basename, date=timestamp_str)
34 |     # ensure_dir_exists( os.path.dirname( hip_name ) )
35 |
36 |     return timestamp_str
37 |
38 |
39 |
--------------------------------------------------------------------------------
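A short usage sketch tying these helpers together, following the format hinted at in the commented-out lines above. It assumes timestamp_submit resolves through the plugin loader like the other api modules; the directory and basename are hypothetical.

```python
import os
import firehawk_plugin_loader
ts = firehawk_plugin_loader.module_package('timestamp_submit').timestamp_submit

datetime_object, datetime_object_str = ts.update_timestamp()
timestamp_str = ts.timestamp_submission_str(datetime_object, '/prod/job', 'shot_lighting')
hip_name = "{dir}/submit/{base}.{date}.hip".format(dir='/prod/job', base='shot_lighting', date=timestamp_str)
ts.ensure_dir_exists(os.path.dirname(hip_name))  # creates /prod/job/submit if missing
```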
/toolbar/openfirehawk/replace_rop_inputs_with_boxes.shelf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/toolbar/openfirehawk/replace_rop_inputs_with_boxes.shelf
--------------------------------------------------------------------------------
/scripts/modules/firehawk/api/pdgkvstore.py:
--------------------------------------------------------------------------------
1 | import pdg
2 | import os, sys
3 | import json
4 | from os.path import sep, join
5 |
6 | def pjoin(*args, **kwargs):
7 |     return join(*args, **kwargs).replace(sep, '/') # for windows compatibility.
8 |
9 | def convert_to_valid_key(name):
10 |     name = name.replace("-", "_")
11 |     name = name.replace(".", "_")
12 |     name = name.replace("/", "_")
13 |     name = name.replace("\\", "_")
14 |
15 |     return name
16 |
17 | def work_item_db_put(key, value, graph=None): # This method's contents should be replaced with a database call instead of using the filesystem. value should be a dict, even if it is just a single value.
18 |     # key should be of form job/seq/shot/element/variant/version/index
19 |     # write to /tmp/log/pdg/pdgkvstore/workitems/job/seq/shot/element/variant/version/index
20 |
21 |     try:
22 |         if not isinstance( value, dict ):
23 |             raise Exception('Error: value must be a dictionary')
24 |
25 |         if graph:
26 |             key = convert_to_valid_key(key)
27 |             # print("Set graph [{}] attrib [{}] value [{}]".format(graph, key, value))
28 |             # if not new_item.graph.attribValue("version"):
29 |             with graph.lockAttributes() as owner:
30 |                 owner.setDictAttrib(key, value)
31 |             return
32 |
33 |         raise ValueError("Graph object required")
34 |     except Exception as e:
35 |         print("### EXCEPTION work_item_db_put() ###")
36 |         print(str(e))
37 |
38 | def work_item_db_get(key, graph=None): # This method's contents should be replaced with a database call instead of using the filesystem. value should be a dict, even if it is just a single value.
39 |     # key should be of form job/seq/shot/element/variant/version/index
40 |     # write to /tmp/log/pdg/pdgkvstore/workitems/job/seq/shot/element/variant/version/index
41 |
42 |     if graph:
43 |         key = convert_to_valid_key(key)
44 |         value = graph.dictAttribValue(key).asDictionary()
45 |         # print("Retrieve graph [{}] attrib [{}] value [{}]".format(graph, key, value))
46 |         return value
47 |
48 |     raise ValueError("Graph object required")
--------------------------------------------------------------------------------
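A minimal usage sketch, assuming a pdg graph object is at hand (for example via a work item inside a PDG callback); the key below is hypothetical but follows the documented form.

```python
# Hedged sketch: store, then retrieve, a per-work-item dictionary on the graph.
graph = work_item.graph                          # assumption: a pdg work item is in scope
key = 'job/seq/shot/element/variant/v001/0'      # hypothetical key in the documented form
work_item_db_put(key, {'version': 'v001'}, graph=graph)
value = work_item_db_get(key, graph=graph)       # -> {'version': 'v001'}
```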
/scripts/modules/trashcan.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Trash everything below the current path that does not have a .protect file in the folder. It should normally only be run from a folder such as 'job/seq/shot/cache' to trash all data below that path.
4 | # See opmenu and firehawk_submit.py for tools to add protect files based on a top net tree for any given hip file.
5 | # After creating .protect files in cache paths you wish to keep, you would generally run this script in the root cache or render output path, since it will create a trash can there.
6 |
7 | argument="$1"
8 |
9 | echo ""
10 |
11 | if [[ -z $argument ]] ; then
12 |     echo "DRY RUN. To move files to trash, use argument -m after reviewing exclude_list.txt and you are sure it lists everything you wish to protect from being moved to the trash."
13 |     echo ""
14 |     ARGS1='--remove-source-files'
15 |     ARGS2='--dry-run'
16 | else
17 |     case $argument in
18 |         -m|--move)
19 |             echo "MOVING FILES TO TRASH."
20 |             echo ""
21 |             ARGS1='--remove-source-files'
22 |             ARGS2=''
23 |             ;;
24 |         *)
25 |             echo "ERROR: Unknown argument: ${argument}" >&2
26 |             exit 1
27 |             ;;
28 |     esac
29 | fi
30 |
31 | current_dir=$(pwd)
32 | echo "current dir $current_dir"
33 | base_dir=$(pwd | cut -d/ -f1-2)
34 | echo "base_dir $base_dir"
35 |
36 |
37 | source=$(realpath --relative-to="$base_dir" "$current_dir")/
38 | echo "source $source"
39 | target=trash/
40 | echo "target $target"
41 |
42 | # Ensure trash exists at base dir.
43 | mkdir -p "$base_dir/$target"
44 | echo ""
45 | echo "Build exclude_list.txt contents with directories containing .protect files"
46 | find . -name .protect -print0 |
47 | while IFS= read -r -d '' line; do
48 |     path=$(realpath --relative-to=. "$line")
49 |     dirname "$path"
50 | done > exclude_list.txt
51 |
52 | path_to_list=$(realpath --relative-to=. exclude_list.txt)
53 | echo "$path_to_list" >> exclude_list.txt
54 |
55 | cat exclude_list.txt
56 |
57 | cd "$base_dir"
58 |
59 | # Run this command from the drive root, eg /prod.
60 | rsync -a $ARGS1 --prune-empty-dirs --inplace --relative --exclude-from="$current_dir/exclude_list.txt" --include='*' --include='*/' "$source" "$target" $ARGS2 -v
61 | cd "$current_dir"
62 |
--------------------------------------------------------------------------------
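A typical session, per the header comments; the paths are hypothetical:

```bash
cd /prod/job/seq/shot/cache      # hypothetical cache root on the /prod drive
touch keep_this_sim/.protect     # protect any folder you want to keep
./trashcan.sh                    # dry run: review exclude_list.txt and the rsync report
./trashcan.sh -m                 # actually move unprotected files to /prod/trash
```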
/toolbar/openfirehawk/convert_selection_to_relative_paths.shelf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/firehawkvfx/firehawk-pdg-tools/HEAD/toolbar/openfirehawk/convert_selection_to_relative_paths.shelf
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Firehawk PDG Tools
2 |
3 | For Houdini 18.5.596 and higher. Tested on Ubuntu 18.04, CentOS 7, macOS 11.4, and Windows 10.
4 |
5 | Firehawk PDG Tools is a PDG implementation enabling capabilities commonly required in production. It has been used and contributed to by Rising Sun Pictures and Stormborn Studios.
6 |
7 | # Installation
8 |
9 | - Ensure the "packages" folder exists in the user houdini folder. The user houdini folder is different depending on your OS. You may need to create the 'packages' folder if it doesn't already exist. eg:
10 | ```
11 | Linux:
12 | ~/houdini18.5/packages
13 | Mac OS:
14 | ~/Library/Preferences/houdini/18.5/packages
15 | Windows:
16 | $HOME/Documents/houdini18.5/packages
17 | ```
18 |
19 | - Place the entire firehawk-pdg-tools folder in the `packages` folder.
20 | - Copy the firehawk-pdg-tools.json directly into the `packages` folder. JSON files in `packages` are read at startup, and they load the package contents they specify.
21 |
22 | From the packages folder, the tree should look like this:
23 | ```
24 | packages % tree -L 1
25 | .
26 | ├── firehawk-pdg-tools
27 | └── firehawk-pdg-tools.json
28 | ```
29 |
30 | - If you wish to show debugging information for submission, use this env var before you load houdini:
31 | ```
32 | FH_VAR_DEBUG_PDG=10
33 | ```
34 |
35 | # Features:
36 |
37 | - Auto versioning of directories / paths when new cooks occur.
38 | - Wedging large numbers of outputs across shots, elements and variations - everything is a wedge.
39 | - Timestamped immutable hip files to reproduce submissions.
40 | - Suitable hooks to modify asset requests if required (eg. if you use a database for your assets)
41 | - Python plugin architecture allowing customisation for studio specific requirements, like DB requests for new assets.
42 | - An example clone of the local scheduler implementing the Firehawk submit class. It's also possible to apply these tools to any scheduler in their onschedule callback.
43 |
44 | # Demo
45 |
46 | - Open the firehawk.pdg.versioning.demo.hip file
47 | - Right click on /obj/sop_geo_process/topnet1/ropfetch_flipbook, and select 'Cook with Preflight'
48 |
49 | # Other Notes:
50 |
51 | - If python scripts are executed in a plain shell on a farm, outside the current houdini process, ensure the command has the PYTHONPATH env var set to include the same path defined in firehawk-pdg-tools.json. Normally with the local scheduler this isn't necessary, since PYTHONPATH has already been set by the package via firehawk-pdg-tools.json. This is especially relevant for a process like PDGMQ, which won't be executed inside houdini and therefore wouldn't have access to the firehawk-pdg-tools libraries it may require.
52 |
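53 | For example, a wrapper script on the farm could extend its module search path before importing the tools. A minimal sketch (the package root below is illustrative; use the path defined by your firehawk-pdg-tools.json):
54 | ```
55 | import os
56 | import sys
57 |
58 | # Hypothetical package location; match the path set by firehawk-pdg-tools.json.
59 | pkg_root = os.path.expanduser('~/houdini18.5/packages/firehawk-pdg-tools')
60 | sys.path.append(os.path.join(pkg_root, 'scripts', 'modules'))
61 |
62 | import firehawk_plugin_loader  # now importable outside of houdini
63 | ```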
--------------------------------------------------------------------------------
/scripts/modules/deadline_post_task.py:
--------------------------------------------------------------------------------
1 | print 'POST TASK SCRIPT'
2 |
3 | import os
4 | import sys
5 | import json
6 | import re
7 |
8 | from Deadline.Scripting import *
9 | from Deadline.Plugins import *
10 |
11 | hfs_env=os.environ['HFS']
12 | print 'hfs_env:', hfs_env
13 | houpythonlib = os.path.join(hfs_env, 'houdini/python2.7libs')
14 | print 'houpythonlib:', houpythonlib
15 | sys.path.append(houpythonlib)
16 |
17 | # pythonpath = os.environ['PYTHONPATH']
18 | # print 'pythonpath', pythonpath
19 |
20 | sys.path.append('/usr/lib64/python2.7/site-packages')
21 | sys.path.append('/home/deadlineuser/.local/lib/python2.7/site-packages')
22 | sys.path.append('/usr/lib/python2.7/site-packages')
23 |
24 | # import pdgcmd
25 |
26 | def __main__( *args ):
27 |
28 | print 'main post task'
29 |
30 | deadlinePlugin = args[0]
31 | job = deadlinePlugin.GetJob()
32 |
33 | deadlinePlugin.LogInfo(str(args))
34 |
35 | # for item in args:
36 | # deadlinePlugin.LogInfo(str(item))
37 |
38 | deadlinePlugin.LogInfo("In Test Task!")
39 |
40 | for item in dir(job):
41 |
42 | deadlinePlugin.LogInfo(str(item))
43 | if 'get' in str(item).lower():
44 | try:
45 | # call each getter via getattr rather than building a string for eval
46 | test = getattr(job, item)()
47 | deadlinePlugin.LogInfo(str(test))
48 | except Exception:
49 | test = None
50 |
51 | deadlinePlugin.LogInfo("job.GetOutputFileNamesForTask()")
52 | deadlinePlugin.LogInfo(str(job.GetOutputFileNamesForTask))
53 | deadlinePlugin.LogInfo(str(job.GetOutputFileNamesForTask()))
54 | #deadlinePlugin.LogInfo(dir(job.GetOutputFileNamesForTask()))
55 |
56 | # for item in job:
57 | # deadlinePlugin.LogInfo(str(item))
58 |
59 |
60 | # if hfs_env:
61 | # # Append $PYTHONPATH if not set
62 | # houdini_python_libs = houpythonlib
63 | # python_path = GetProcessEnvironmentVariable('PYTHONPATH')
64 | # if python_path:
65 | # if not houdini_python_libs in python_path:
66 | # python_path.append(path_combine + houdini_python_libs)
67 | # else:
68 | # python_path = houdini_python_libs
69 |
70 | # LogInfo('Setting PYTHONPATH: {}'.format(python_path))
71 | # SetProcessEnvironmentVariable('PYTHONPATH', python_path)
72 | # itemname = None
73 | # callbackserver = None
74 |
75 | # try:
76 | # if not itemname:
77 | # itemname = os.environ['PDG_ITEM_NAME']
78 | # print 'itemname', itemname
79 | # if not callbackserver:
80 | # callbackserver = os.environ['PDG_RESULT_SERVER']
81 | # except KeyError as exception:
82 | # print "ERROR: {} must be in environment or specified via argument flag.".format(exception.message)
83 | # exit(1)
84 | # print 'get work item'
85 | # work_item = WorkItem(getWorkItemJsonPath(itemname))
86 |
87 | # print 'dir work_item', work_item
--------------------------------------------------------------------------------
/scripts/s3_sync/s3_sync.py:
--------------------------------------------------------------------------------
1 |
2 | import os
3 | import sys
4 |
5 | sys.path.append('/usr/lib64/python2.7/site-packages')
6 | home_site_packages = os.path.expanduser('~/.local/lib/python2.7/site-packages')
7 | sys.path.append(home_site_packages)
8 | sys.path.append('/usr/lib/python2.7/site-packages')
9 |
10 | from awscli.clidriver import create_clidriver
11 | import boto3
12 |
13 | # enable boto3 stream logging only after boto3 has been imported.
14 | debug = False
15 | if debug:
16 | boto3.set_stream_logger('')
17 |
18 | class syncfile():
19 | def __init__(self, fullpath='', bucketname=''):
20 | self.fullpath = fullpath
21 |
22 | self.dirname = os.path.split(self.fullpath)[0]
23 | self.filename = os.path.split(self.fullpath)[1]
24 |
25 | self.bucketname = bucketname
26 | self.bucketdirname = 's3://' + self.bucketname + os.path.split(self.fullpath)[0]
27 |
28 | self.s3_client = boto3.client('s3')
29 | self.s3_resource = boto3.resource('s3')
30 | self.s3_client_result = None
31 |
32 | self.s3_args = []
33 |
34 | self.force = False
35 | self.quiet = False
36 | self.ignore_errors = False
37 | self.pushed = False
38 | self.pulled = False
39 |
40 | def aws_cli(self, *cmd):
41 | self.old_env = dict(os.environ)
42 | try:
43 |
44 | # Environment
45 | self.env = os.environ.copy()
46 | self.env['LC_CTYPE'] = u'en_US.UTF'
47 | os.environ.update(self.env)
48 |
49 | # Run awscli in the same process
50 | self.exit_code = create_clidriver().main(*cmd)
51 |
52 | # Deal with problems
53 | if (self.exit_code > 0) and (not self.ignore_errors) and (self.exit_code != 2):
54 | raise RuntimeError('AWS CLI exited with code {}'.format(self.exit_code))
55 | finally:
56 | os.environ.clear()
57 | os.environ.update(self.old_env)
58 |
59 | def local_push(self):
60 | if self.pushed==False:
61 | print 'upload', self.fullpath
62 | if self.force:
63 | self.s3_client_result = self.s3_client.upload_file(self.fullpath, self.bucketname, self.fullpath)
64 | # upload to s3 with boto is preferable to the cli. However, the cli provides the sync function below, which only transfers files that don't match - a better default behaviour.
65 | else:
66 | self.s3_args = ['s3', 'sync', self.dirname, self.bucketdirname, '--exclude', '*', '--include', self.filename]
67 | if self.quiet:
68 | self.s3_args.append('--quiet')
69 | print 'args', self.s3_args
70 | self.cli_operation = self.aws_cli( self.s3_args )
71 | self.pushed = True
72 |
73 | def local_pull(self):
74 | if self.pulled==False:
75 | print 'download', self.fullpath
76 | if self.force:
77 | self.s3_client_result = self.s3_client.download_file(self.bucketname, self.fullpath, self.fullpath)
78 | else:
79 | self.s3_args = ['s3', 'sync', self.bucketdirname, self.dirname, '--exclude', '*', '--include', self.filename]
80 | if self.quiet:
81 | self.s3_args.append('--quiet')
82 | print 'args', self.s3_args
83 | self.cli_operation = self.aws_cli( self.s3_args )
84 | self.pulled = True
85 |
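86 | # A minimal usage sketch (bucket name and file path are illustrative):
87 | # f = syncfile(fullpath='/prod/tst/cache/sphere/v001/sphere.0001.bgeo.sc', bucketname='mybucket.example.com')
88 | # f.local_push() # syncs the file up to s3://mybucket.example.com/prod/tst/cache/sphere/v001/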
--------------------------------------------------------------------------------
/scripts/modules/firehawk/api/submit_logging.py:
--------------------------------------------------------------------------------
1 | import os
2 | import time
3 |
4 | def resolve_debug_default():
5 | if os.getenv('FH_VAR_DEBUG_PDG', '0') in tuple( [str(x) for x in range(11)] ):
6 | debug_default = int( os.getenv('FH_VAR_DEBUG_PDG', '0') ) # if numeric, then use number up to 10.
7 | else:
8 | debug_default = int( (os.getenv('FH_VAR_DEBUG_PDG', 'false').lower() in ('true', 'yes')) ) # resolve env var as int
9 | return debug_default
10 |
11 | debug_default = resolve_debug_default()
12 |
13 | class FirehawkLogger():
14 | def __init__(self, debug=debug_default, logger_object=None, start_time=None):
15 | self.debug_verbosity = debug
16 |
17 | if start_time is None:
18 | start_time = time.time()
19 |
20 | self.start_time = start_time # Start time is used to continue tracking of a log time
21 | self.last_time = None
22 |
23 | self.logger_object = logger_object # allows you to pass a custom logger object
24 |
25 | def initLogger(self, logger=None, log_level=None):
26 | """
27 | optional user method to init logger
28 | """
29 | return
30 |
31 | def set_verbosity(self, debug):
32 | self.debug_verbosity = debug
33 |
34 | def timed_info(self, start_time=None, label=''): # provides time passed since start time and time since last running of this method.
35 | if start_time is not None:
36 | self.start_time = start_time
37 | else:
38 | start_time = self.start_time
39 | if self.last_time is None:
40 | self.last_time = start_time
41 |
42 | message = "--- {} seconds --- Passed during Pre Submit --- {} seconds --- {}".format( '%.4f' % (time.time() - start_time), '%.4f' % (time.time() - self.last_time), label )
43 | self.info( message )
44 | self.last_time = time.time()
45 |
46 | def timed_debug(self, message, start_time=None): # provides time passed since start time and time since last running of this method.
47 | if start_time is not None:
48 | self.start_time = start_time
49 | else:
50 | start_time = self.start_time
51 | if self.last_time is None:
52 | self.last_time = start_time
53 |
54 | message = "--- {} seconds --- Passed during Pre Submit --- {} seconds --- {}".format( '%.4f' % (time.time() - start_time), '%.4f' % (time.time() - self.last_time), message )
55 | self.debug( message ) # unlike timed_info, log at debug level
56 | self.last_time = time.time()
57 |
58 | def debug(self, message):
59 | if self.logger_object is not None and hasattr( self.logger_object, 'debug' ):
60 | self.logger_object.debug( message )
61 | else:
62 | if self.debug_verbosity>=10: print( message )
63 |
64 | def info(self, message):
65 | if self.logger_object is not None and hasattr( self.logger_object, 'info' ):
66 | self.logger_object.info( message )
67 | else:
68 | if self.debug_verbosity>=5: print( message )
69 |
70 | def warning(self, message):
71 | if self.logger_object is not None and hasattr( self.logger_object, 'warning' ):
72 | self.logger_object.warning( message )
73 | else:
74 | print( message )
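75 |
76 | # A minimal usage sketch (the verbosity value is illustrative):
77 | # logger = FirehawkLogger(debug=10)
78 | # logger.timed_debug('built job spec') # prints time since start_time and since the last timed call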
--------------------------------------------------------------------------------
/scripts/modules/firehawk/api/houpdgkvstore.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | pdg_process = os.getenv('PDG_process', 0)
4 |
5 | if not pdg_process: # PDG may load similar python libraries to houdini while outside of a houdini context, eg in the pdgmq python process on the farm. In that case hou is unavailable, so these modules are only imported when not running in a pdg-only process.
6 | import pdg, hou, firehawk_read
7 | import firehawk_plugin_loader
8 | pdgkvstore = firehawk_plugin_loader.module_package('pdgkvstore').pdgkvstore
9 | firehawk_logger = firehawk_plugin_loader.module_package('submit_logging').submit_logging.FirehawkLogger()
10 |
11 | # This retrieves the kvstore for a given work item by reading parameters evaluated for that work item (like the version). It is separate from pdgkvstore because it requires hou, which should be isolated from pdg where possible.
12 |
13 | def getPdgKvStore(work_item): # Attempt to get previous job id data for a work item to retrieve a log. This depends on version parameters defined by hou nodes.
14 | subindex = work_item.batchIndex # schedulers only ever operate on single work items or batch parents, so this should usually return -1 here. If this is used in scenarios other than on a scheduler, we should acquire the batch parent if the work item has one.
15 |
16 | required_components = ['job', 'seq', 'shot', 'element', 'variant']
17 |
18 | kwargs = {}
19 |
20 | for key in required_components:
21 | firehawk_logger.debug( 'Retrieve parm/attrib: {} for work_item: {}'.format( key, work_item ))
22 | value = firehawk_read.getLiveParmOrAttribValue(work_item, key, type='string')
23 | if value is None:
24 | return None
25 | kv = { key: value }
26 | firehawk_logger.debug( 'update kv: {}'.format( kv ) )
27 | kwargs.update( kv )
28 |
29 | firehawk_logger.debug('kwargs: {}'.format( kwargs ))
30 |
31 | # Get the version for the work item by evaluating the version parameter on the node.
32 |
33 | if firehawk_read.get_is_exempt_from_hou_node_path(work_item): # kv store for work items without versions / hou nodes is not presently supported.
34 | return None
35 |
36 | version_db_hou_node_path = firehawk_read.get_version_db_hou_node_path(work_item=work_item)
37 | firehawk_logger.debug( "version_db_hou_node_path: {}".format( version_db_hou_node_path ) )
38 | version_db_hou_node = hou.node( version_db_hou_node_path )
39 |
40 | json_object = None
41 | version_parm = version_db_hou_node.parm('version')
42 | if version_parm is not None:
43 | with work_item.makeActive(): version_int = version_parm.eval()
44 | version_str = 'v'+str( version_int ).zfill(3)
45 | kwargs['version'] = version_str
46 |
47 | key = '{}/{}/{}/{}/{}/{}/{}'.format( kwargs['job'], kwargs['seq'], kwargs['shot'], kwargs['element'], kwargs['variant'], kwargs['version'], subindex )
48 | firehawk_logger.debug('work_item_db_get key: {}'.format( key ) )
49 | json_object = pdgkvstore.work_item_db_get(key, graph=work_item.graph)
50 |
51 | return json_object # if an attribute couldn't be acquired this returns None; if kvstore retrieval failed or the file didn't exist, it returns an empty dict.
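52 |
53 | # A minimal usage sketch (assumes the kv layout written by the firehawk local scheduler):
54 | # json_object = getPdgKvStore(work_item)
55 | # if json_object: # may be None or {} if nothing could be retrieved
56 | # log_uri = json_object['log_uri']['value']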
--------------------------------------------------------------------------------
/tests/pythonscript_dirname/description.md:
--------------------------------------------------------------------------------
1 | Description of problems:
2 |
3 | 1.
4 |
5 | Work item fails on /obj/sop_geo_process/topnet1/mplay1/dirname2:
6 | ```
7 | Error
8 | mplay1_dirname2_18016 failed to run script: Traceback (most recent call last):
9 | File "mplay1_dirname2_script", line 14, in <module>
10 | IndexError: list index out of range
11 | ```
12 |
13 | Steps to reproduce:
14 |
15 | - open the hip file
16 | - right click on /obj/sop_geo_process/topnet1/mplay1
17 | - select 'cook with preflight'
18 |
19 | The issue is somewhat intermittent, so you may have to delete all the written output and cook again to reproduce this.
20 |
21 | Houdini Version:
22 | 18.5.596
23 |
24 | OS:
25 | Ubuntu 18.04
26 |
27 | 2.
28 |
29 | Open GL ROP will not render with Intel Iris Graphics - Macbook Pro
30 |
31 | 3.
32 |
33 | Items that cook have repeated operations where parms are set. Setting a parm that already matches the target value wastes more time than checking the value first and setting it conditionally. For example, these operations are repeated every frame and shouldn't occur:
34 |
35 | ```
36 | 22:17:59.393: Requesting sub item 7 for batch ropfetch4_2316
37 | 22:17:59.396: Successfully loaded sub item ropfetch4_2324
38 | 22:17:59.396: Setting batch sub index to 7
39 | 22:17:59.396: Setting channels for wedge attrib "obj_sop_geo_process_dust_hip"
40 | 22:17:59.397: Setting value for "obj_sop_geo_process_dust_hip" with type "Parameter Value"
41 | 22:17:59.397: Parm "/obj/sop_geo_process/dust/hip" not found
42 | 22:17:59.397: Setting channels for wedge attrib "obj_sop_geo_process_dust_shot"
43 | 22:17:59.397: Setting value for "obj_sop_geo_process_dust_shot" with type "Parameter Value"
44 | 22:17:59.397: Setting parm "/obj/sop_geo_process/dust/shot" at index "0" to value "shot0010"
45 | 22:17:59.397: Setting channels for wedge attrib "obj_sop_geo_process_dust_seq"
46 | 22:17:59.397: Setting value for "obj_sop_geo_process_dust_seq" with type "Parameter Value"
47 | 22:17:59.397: Setting parm "/obj/sop_geo_process/dust/seq" at index "0" to value "seq0010"
48 | 22:17:59.397: Setting channels for wedge attrib "obj_sop_geo_process_dust_index_key"
49 | 22:17:59.397: Setting value for "obj_sop_geo_process_dust_index_key" with type "Parameter Value"
50 | 22:17:59.397: Parm "/obj/sop_geo_process/dust/index_key" not found
51 | 22:17:59.397: Setting channels for wedge attrib "obj_sop_geo_process_dust_variant"
52 | 22:17:59.397: Setting value for "obj_sop_geo_process_dust_variant" with type "Parameter Value"
53 | 22:17:59.397: Setting parm "/obj/sop_geo_process/dust/variant" at index "0" to value "wedge0"
54 | 22:17:59.397: Setting channels for wedge attrib "obj_sop_geo_process_dust_element"
55 | 22:17:59.397: Setting value for "obj_sop_geo_process_dust_element" with type "Parameter Value"
56 | 22:17:59.397: Setting parm "/obj/sop_geo_process/dust/element" at index "0" to value "sphere-dust"
57 | 22:17:59.397: Setting channels for wedge attrib "obj_sop_geo_process_dust_index_key_expanded"
58 | 22:17:59.397: Setting value for "obj_sop_geo_process_dust_index_key_expanded" with type "Parameter Value"
59 | 22:17:59.397: Parm "/obj/sop_geo_process/dust/index_key_expanded" not found
60 | 22:17:59.397: Setting channels for wedge attrib "obj_sop_geo_process_dust_index_key_unexpanded"
61 | 22:17:59.397: Setting value for "obj_sop_geo_process_dust_index_key_unexpanded" with type "Parameter Value"
62 | 22:17:59.397: Parm "/obj/sop_geo_process/dust/index_key_unexpanded" not found
63 | 22:17:59.397: Setting channels for wedge attrib "obj_sop_geo_process_dust_version"
64 | 22:17:59.397: Setting value for "obj_sop_geo_process_dust_version" with type "Parameter Value"
65 | 22:17:59.397: Setting parm "/obj/sop_geo_process/dust/version" at index "0" to value "2"
66 | ```
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_2.0.0.hda/Top_1firehawkdeadlinescheduler_8_82.0.0/SyncNodeVersion:
--------------------------------------------------------------------------------
1 | try:
2 | old_ver = int(kwargs.get('old_version'))
3 | except (TypeError, ValueError):
4 | old_ver = 0
5 |
6 | def copyParameter(p_dst, p_src):
7 | if len(p_src.keyframes()) > 0:
8 | p_dst.setKeyframes(p_src.keyframes())
9 | elif p_src.parmTemplate().dataType() == hou.parmData.String:
10 | p_dst.set(p_src.unexpandedString())
11 | else:
12 | p_dst.set(p_src.eval())
13 |
14 | def parameterRenamed(node, oldName, newName):
15 | p_src = node.parm(oldName)
16 | if p_src is not None:
17 | p_dst = node.parm(newName)
18 | copyParameter(p_dst, p_src)
19 |
20 |
21 | def multiparmRenamed(node, oldFolderName, newFolderName, children):
22 | p_src = node.parm(oldFolderName)
23 | if p_src is not None:
24 | p_dst = node.parm(newFolderName)
25 |
26 |
27 | num = p_src.eval()
28 | p_dst.set(num)
29 |
30 | for x in range(1, num + 1):
31 | for oldChildName,newChildName in children:
32 | parameterRenamed(node, oldChildName + str(x), newChildName + str(x))
33 |
34 | if old_ver < 2:
35 | node = kwargs['node']
36 | parameterRenamed(node, 'deadline_job_name', 'deadline_jobname')
37 | parameterRenamed(node, 'deadline_job_comment', 'deadline_jobcomment')
38 | parameterRenamed(node, 'deadline_job_dept', 'deadline_jobdept')
39 |
40 | parameterRenamed(node, 'deadline_job_batch_name', 'deadline_jobbatchname')
41 | parameterRenamed(node, 'deadline_job_pool', 'deadline_jobpool')
42 | parameterRenamed(node, 'deadline_job_group', 'deadline_jobgroup')
43 | parameterRenamed(node, 'deadline_job_priority', 'deadline_jobpriority')
44 | parameterRenamed(node, 'deadline_concurrent_tasks', 'deadline_concurrenttasks')
45 | parameterRenamed(node, 'deadline_pre_job_script', 'deadline_prejobscript')
46 | parameterRenamed(node, 'deadline_post_job_script', 'deadline_postjobscript')
47 | parameterRenamed(node, 'deadline_machine_limit', 'deadline_machinelimit')
48 | parameterRenamed(node, 'deadline_machine_list', 'deadline_machinelist')
49 | parameterRenamed(node, 'deadline_machine_list_black', 'deadline_machinelistblack')
50 | parameterRenamed(node, 'deadline_on_job_complete', 'deadline_onjobcomplete')
51 |
52 | parameterRenamed(node, 'deadline_verbose_log', 'deadline_verboselog')
53 | parameterRenamed(node, 'deadline_pdgmq_use_ip', 'deadline_pdgmquseip')
54 | parameterRenamed(node, 'deadline_pdgmq_as_task', 'deadline_pdgmqastask')
55 | parameterRenamed(node, 'deadline_force_reload_plugin', 'deadline_forcereloadplugin')
56 | parameterRenamed(node, 'deadline_launch_monitor', 'deadline_launchmonitor')
57 |
58 | parameterRenamed(node, 'deadline_max_submit_tasks', 'deadline_maxsubmittasks')
59 | parameterRenamed(node, 'deadline_max_check_tasks', 'deadline_maxchecktasks')
60 |
61 | parameterRenamed(node, 'deadline_override_repo', 'deadline_overriderepo')
62 | parameterRenamed(node, 'deadline_connection_type', 'deadline_connectiontype')
63 | parameterRenamed(node, 'deadline_plugin_directory', 'deadline_plugindirectory')
64 |
65 | parameterRenamed(node, 'deadline_pre_task_script', 'deadline_pretaskscript')
66 | parameterRenamed(node, 'deadline_post_task_script', 'deadline_posttaskscript')
67 |
68 | multiparmRenamed(node, 'deadline_jobfile_kvpair', 'deadline_jobfilekvpair', \
69 | [('deadline_jobfile_key', 'deadline_jobfilekey'), ('deadline_jobfile_value', 'deadline_jobfilevalue')])
70 |
71 | multiparmRenamed(node, 'deadline_pluginfile_kvpair', 'deadline_pluginfilekvpair', \
72 | [('deadline_pluginfile_key', 'deadline_pluginfilekey'), ('deadline_pluginfile_value', 'deadline_pluginfilevalue')])
73 |
--------------------------------------------------------------------------------
/hda/Top_firehawkdeadlinescheduler_1.0.0.hda/Top_1firehawkdeadlinescheduler/SyncNodeVersion:
--------------------------------------------------------------------------------
1 | try:
2 | old_ver = int(kwargs.get('old_version'))
3 | except (TypeError, ValueError):
4 | old_ver = 0
5 |
6 | def copyParameter(p_dst, p_src):
7 | if len(p_src.keyframes()) > 0:
8 | p_dst.setKeyframes(p_src.keyframes())
9 | elif p_src.parmTemplate().dataType() == hou.parmData.String:
10 | p_dst.set(p_src.unexpandedString())
11 | else:
12 | p_dst.set(p_src.eval())
13 |
14 | def parameterRenamed(node, oldName, newName):
15 | p_src = node.parm(oldName)
16 | if p_src is not None:
17 | p_dst = node.parm(newName)
18 | copyParameter(p_dst, p_src)
19 |
20 |
21 | def multiparmRenamed(node, oldFolderName, newFolderName, children):
22 | p_src = node.parm(oldFolderName)
23 | if p_src is not None:
24 | p_dst = node.parm(newFolderName)
25 |
26 |
27 | num = p_src.eval()
28 | p_dst.set(num)
29 |
30 | for x in range(1, num + 1): # range rather than xrange, so this also runs under py3 houdini builds
31 | for oldChildName,newChildName in children:
32 | parameterRenamed(node, oldChildName + str(x), newChildName + str(x))
33 |
34 | if old_ver < 2:
35 | node = kwargs['node']
36 | parameterRenamed(node, 'deadline_job_name', 'deadline_jobname')
37 | parameterRenamed(node, 'deadline_job_comment', 'deadline_jobcomment')
38 | parameterRenamed(node, 'deadline_job_dept', 'deadline_jobdept')
39 |
40 | parameterRenamed(node, 'deadline_job_batch_name', 'deadline_jobbatchname')
41 | parameterRenamed(node, 'deadline_job_pool', 'deadline_jobpool')
42 | parameterRenamed(node, 'deadline_job_group', 'deadline_jobgroup')
43 | parameterRenamed(node, 'deadline_job_priority', 'deadline_jobpriority')
44 | parameterRenamed(node, 'deadline_concurrent_tasks', 'deadline_concurrenttasks')
45 | parameterRenamed(node, 'deadline_pre_job_script', 'deadline_prejobscript')
46 | parameterRenamed(node, 'deadline_post_job_script', 'deadline_postjobscript')
47 | parameterRenamed(node, 'deadline_machine_limit', 'deadline_machinelimit')
48 | parameterRenamed(node, 'deadline_machine_list', 'deadline_machinelist')
49 | parameterRenamed(node, 'deadline_machine_list_black', 'deadline_machinelistblack')
50 | parameterRenamed(node, 'deadline_on_job_complete', 'deadline_onjobcomplete')
51 |
52 | parameterRenamed(node, 'deadline_verbose_log', 'deadline_verboselog')
53 | parameterRenamed(node, 'deadline_pdgmq_use_ip', 'deadline_pdgmquseip')
54 | parameterRenamed(node, 'deadline_pdgmq_as_task', 'deadline_pdgmqastask')
55 | parameterRenamed(node, 'deadline_force_reload_plugin', 'deadline_forcereloadplugin')
56 | parameterRenamed(node, 'deadline_launch_monitor', 'deadline_launchmonitor')
57 |
58 | parameterRenamed(node, 'deadline_max_submit_tasks', 'deadline_maxsubmittasks')
59 | parameterRenamed(node, 'deadline_max_check_tasks', 'deadline_maxchecktasks')
60 |
61 | parameterRenamed(node, 'deadline_override_repo', 'deadline_overriderepo')
62 | parameterRenamed(node, 'deadline_connection_type', 'deadline_connectiontype')
63 | parameterRenamed(node, 'deadline_plugin_directory', 'deadline_plugindirectory')
64 |
65 | parameterRenamed(node, 'deadline_pre_task_script', 'deadline_pretaskscript')
66 | parameterRenamed(node, 'deadline_post_task_script', 'deadline_posttaskscript')
67 |
68 | multiparmRenamed(node, 'deadline_jobfile_kvpair', 'deadline_jobfilekvpair', \
69 | [('deadline_jobfile_key', 'deadline_jobfilekey'), ('deadline_jobfile_value', 'deadline_jobfilevalue')])
70 |
71 | multiparmRenamed(node, 'deadline_pluginfile_kvpair', 'deadline_pluginfilekvpair', \
72 | [('deadline_pluginfile_key', 'deadline_pluginfilekey'), ('deadline_pluginfile_value', 'deadline_pluginfilevalue')])
--------------------------------------------------------------------------------
/scripts/s3_sync/s3_sync_shell.py:
--------------------------------------------------------------------------------
1 | # Synchronise work items to or from s3.
2 |
3 | # Example
4 | # python /home/deadlineuser/houdini17.5/scripts/s3_sync/s3_sync_shell.py --file "/prod/tst/s3sync/upload/cache/sphere/v014/tst.s3sync.upload.uploadtest.sphere.v014.w0.*.bgeo.sc" --direction "push" --bucket "man.firehawkfilm.com"
5 | # Changed files will take precedence
6 | # If no data has changed, then no download/upload will take place.
7 | # You must have aws cli installed and run aws configure to setup with your secret key.
8 |
9 | import os, sys, argparse
10 |
11 | parser = argparse.ArgumentParser()
12 | parser.add_argument('-f', '--file', type=str, help='file path')
13 | parser.add_argument('-d', '--direction', type=str, help='direction: push/pull')
14 | parser.add_argument('-b', '--bucket', type=str, help='bucket: mys3bucket.example.com')
15 | parser.add_argument('-p', '--pdg', type=str, help='pdg command: True/False')
16 |
17 | _args, other_args = parser.parse_known_args()
18 | file = _args.file
19 | direction = _args.direction
20 | bucket = _args.bucket
21 | if _args.pdg:
22 | pdg_command = _args.pdg
23 | print_log = False
24 | else:
25 | pdg_command = False
26 | print_log = True
27 |
28 | if print_log:
29 | print "sync", file
30 |
31 |
32 | import logging
33 | formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
34 |
35 | def setup_logger(name, log_file, level=logging.INFO):
36 | """Set up as many loggers as you want."""
37 |
38 | handler = logging.FileHandler(log_file)
39 | handler.setFormatter(formatter)
40 |
41 | logger = logging.getLogger(name)
42 | logger.setLevel(level)
43 | logger.addHandler(handler)
44 |
45 | return logger
46 |
47 | # first file logger
48 | logger = setup_logger('first_logger', '/var/tmp/pre_job_logfile.log')
49 | logger.info('start log')
50 |
51 | # second file logger
52 | super_logger = setup_logger('second_logger', '/var/tmp/second_logfile.log')
53 |
54 | sys.path.append('/usr/lib64/python2.7/site-packages')
55 | home_site_packages = os.path.expanduser('~/.local/lib/python2.7/site-packages')
56 | sys.path.append(home_site_packages)
57 | sys.path.append('/usr/lib/python2.7/site-packages')
58 | s3_sync_path = os.environ['FIREHAWK_PDG_TOOLS'] + '/scripts/s3_sync'
59 | sys.path.append(s3_sync_path)
60 | #sys.path.append('/home/deadlineuser/houdini17.5/scripts/s3_sync')
61 |
62 | if direction not in ('push', 'pull'):
63 | logger.info('error no push/pull direction selected')
64 | super_logger.error('error no push/pull direction selected')
65 | sys.exit('error no push/pull direction selected')
66 |
67 | ### import s3_sync and aws cli libraries to push and pull from AWS S3
68 |
69 | from awscli.clidriver import create_clidriver
70 | import s3_sync as s3
71 |
72 | display_output = True
73 |
74 | syncfiles=[]
75 | syncfiles.append(
76 | s3.syncfile(file,bucket)
77 | )
78 |
79 | for index, syncfile in enumerate(syncfiles):
80 | if direction=='push':
81 | if display_output: logger.info("push sync file %s %s up" % (syncfile.dirname, syncfile.filename))
82 | syncfile.local_push()
83 | elif direction=='pull':
84 | if display_output: logger.info("pull sync file %s %s down" % (syncfile.dirname, syncfile.filename))
85 | syncfile.local_pull()
86 |
87 | #return 'complete'
88 |
89 | # clone upstream workitem data. this is for within the pdg context.
90 | # import pdgcmd
91 |
92 | # itemname = None
93 | # callbackserver = None
94 |
95 | # try:
96 | # if not itemname:
97 | # itemname = os.environ['PDG_ITEM_NAME']
98 | # if not callbackserver:
99 | # callbackserver = os.environ['PDG_RESULT_SERVER']
100 |
101 | # except KeyError as exception:
102 | # print "ERROR: {} must be in environment or specified via argument flag.".format(exception.message)
103 | # exit(1)
104 |
105 | # work_item = WorkItem(getWorkItemJsonPath(itemname))
106 |
107 | # print work_item.inputResults
108 |
109 | # for result in work_item.inputResults[0]:
110 | # pdgcmd.reportResultData(result['data'], work_item.name, callbackserver, result['tag'])
--------------------------------------------------------------------------------
/toolbar/openfirehawk/version_up_hip.shelf:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/scripts/modules/firehawk/api/pre_flight.py:
--------------------------------------------------------------------------------
1 | from functools import partial
2 | import os, sys
3 |
4 | import firehawk_submit
5 | import firehawk_plugin_loader
6 |
7 | debug_default = firehawk_plugin_loader.resolve_debug_default()
8 | firehawk_logger = firehawk_plugin_loader.module_package('submit_logging').submit_logging.FirehawkLogger(debug=debug_default)
9 | post_flight = firehawk_plugin_loader.module_package('post_flight').post_flight
10 |
11 | houdini_version = float( '.'.join( os.environ['HOUDINI_VERSION'].split('.')[0:2] ) )
12 | houdini_minor_version = int( os.environ['HOUDINI_VERSION'].split('.')[2] )
13 |
14 | def pull_all_versions_to_all_multiparms(exec_in_main_thread=False, use_json_file=True):
15 | import firehawk_dynamic_versions
16 | firehawk_logger.info( '...Ensuring versions are current from previous submission')
17 | # submit = firehawk_submit.submit( hou.node('/') )
18 | firehawk_dynamic_versions.versions().pull_all_versions_to_all_multiparms( check_hip_matches_submit=False, exec_in_main_thread=exec_in_main_thread, use_json_file=use_json_file ) # Ensure any versions generated in the last submission are in the current hip.
19 | firehawk_logger.info('...Versions finished sync before saving hip.')
20 |
21 | def init_firehawk_topnet_parms(parent):
22 | import firehawk_dynamic_versions, firehawk_read
23 |
24 | top_net_path = firehawk_read.get_parent_top_net(parent).path()
25 | firehawk_dynamic_versions.versions().init_firehawk_topnet_parms( top_net_path )
26 |
27 | def update_parms( pdg_context, taskgraph_file, hip_name, exec_in_main_thread, handler, event):
28 | firehawk_logger.info('...update_parms event handler')
29 | # handler.removeFromAllEmitters() # ??? this might be hanging
30 | # firehawk_logger.info('...Running post_flight.graph_complete.')
31 | post_flight.graph_complete( hip_name ) # you may wish to perform an operation on the hip asset after completion of the graph.
32 | firehawk_logger.info( '...Updating Parms/Versions in this Hip session. exec_in_main_thread: {}'.format( exec_in_main_thread ) )
33 | pull_all_versions_to_all_multiparms(exec_in_main_thread=exec_in_main_thread, use_json_file=True) # we update parms last, but since this comes from a callback, we need to use hou's ability to execute in the main thread.
34 | firehawk_logger.info('...Finished update_parms in event handler.')
35 |
36 | class Preflight(): # This class will be used for assigning and using preflight nodes and cooking the main graph. reloads may not work on this SESI code.
37 | def __init__(self, node=None, debug=None, logger_object=None):
38 | self.node = node
39 | self.selected_nodes = []
40 | self.submit = None
41 |
42 | def set_update_workitems(self, node):
43 | # submitObject = firehawk_submit.submit( debug=self._verbose, logger_object=None )
44 | parent_top_net = self.submit.get_parent_top_net(node)
45 |
46 | if (houdini_minor_version >= 465 and houdini_version == 18.5) or houdini_version >= 19.0:
47 | regenerationtype_parm = parent_top_net.parm('regenerationtype')
48 | if regenerationtype_parm and regenerationtype_parm.evalAsInt() != 1: regenerationtype_parm.set(1)
49 | else:
50 | firehawk_logger.warning( "...Warning: This houdini build {}.{} is < 18.5.465. Setting 'Update Work Items Only' on TOP Net is not possible.".format( houdini_version, houdini_minor_version ) )
51 |
52 | def cook(self, use_preflight_node=False):
53 | import hou, pdg
54 |
55 | exec_in_main_thread=True
56 | if not hou.isUIAvailable():
57 | exec_in_main_thread=False
58 |
59 | node = self.node
60 | hou.setContextOption("last_pdg_cook", node.path())
61 | # ensure firehawk parms exist on topnet
62 | init_firehawk_topnet_parms(node.parent())
63 |
64 | self.submit = firehawk_submit.submit( node )
65 |
66 | # pull_all_versions_to_all_multiparms(use_json_file=True) # we are in the ui thread already, no need to exec in main thread.
67 | self.set_update_workitems(node)
68 |
69 | firehawk_logger.info('...Save_hip_for_submission')
70 | hip_name, taskgraph_file = self.submit.save_hip_for_submission(set_user_data=True, preflight=False, exec_in_main_thread=False) # Snapshot a time stamped hip file for cooking runs. Must be used for all running tasks, not the live hip file in the current session
71 |
72 | # use a handler to save the graph on completion.
73 | if node is None: return
74 | pdg_context = node.getPDGGraphContext()
75 | if pdg_context is None: return
76 | firehawk_logger.info('...AddEventHandler update_parms_partial')
77 | update_parms_partial = partial(update_parms, pdg_context, taskgraph_file, hip_name, exec_in_main_thread)
78 | if not pdg_context.hasEventHandler(update_parms_partial):
79 | pdg_context.addEventHandler(update_parms_partial, pdg.EventType.CookComplete, True) # This should save the graph in any event that it stops, including if it is an error.
80 | firehawk_logger.info('Added event handler')
81 |
82 | if hou.isUIAvailable():
83 | from nodegraphtopui import cookNode
84 | cookNode(node)
85 | else:
86 | node.executeGraph(block=True)
87 |
88 |
89 |
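90 | # A minimal usage sketch (node path from the demo hip; assumes a hou session):
91 | # import hou
92 | # Preflight(node=hou.node('/obj/sop_geo_process/topnet1/ropfetch_flipbook')).cook()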
--------------------------------------------------------------------------------
/scripts/modules/firehawk_plugin_loader.py:
--------------------------------------------------------------------------------
1 | # import firehawk.api and firehawk.plugins. If the user has defined custom asset creation, it will be preferred.
2 | import os
3 | import importlib
4 | import pkgutil
5 | import sys
6 | import firehawk.plugins
7 | import firehawk.api
8 | from importlib import reload
9 |
10 | def resolve_debug_default():
11 | if os.getenv('FH_VAR_DEBUG_PDG', '0') in tuple( [str(x) for x in range(11)] ):
12 | debug_default = int( os.getenv('FH_VAR_DEBUG_PDG', '0') ) # if numeric, then use number up to 10.
13 | else:
14 | debug_default = int( (os.getenv('FH_VAR_DEBUG_PDG', 'false').lower() in ('true', 'yes')) ) # resolve env var as int
15 | return debug_default
16 |
17 | debug_default = resolve_debug_default()
18 |
19 | skip_plugins = int(( os.getenv('FH_SKIP_PLUGINS', 'false').lower() in ('true', 'yes', '1') )) # if FH_SKIP_PLUGINS is TRUE, only the api base will be used.
20 |
21 | enforce_only=None
22 | if skip_plugins:
23 | print('WARNING: FH_SKIP_PLUGINS is TRUE. Will not load any user firehawk plugins.') # this env var is provided to test default behaviour.
24 | enforce_only='api'
25 |
26 | def iter_namespace(ns_pkg):
27 | return pkgutil.iter_modules(ns_pkg.__path__, ns_pkg.__name__ + ".")
28 |
29 | def load_namespace(namespace, module_name=None, verbose=debug_default, reload_module=False): # loads the contents of a namespace ie firehawk.plugins or firehawk.api and returns a list of the contained modules
30 | plugin_modules=[]
31 | for _, name, _ in iter_namespace(namespace):
32 | if module_name == None: # import all
33 | module = importlib.import_module(name)
34 | if verbose: print( 'import module: {}'.format( module ) )
35 | if reload_module and module is not None:
36 | reload(module)
37 | pkgpath = os.path.dirname(namespace.__file__)
38 | plugin_modules.extend( [ name for _, name, _ in pkgutil.iter_modules([pkgpath])] )
39 | else: # determine if single module can be imported
40 | pkgpath = os.path.dirname(namespace.__file__)
41 | module_name_list = [ mname for _, mname, _ in pkgutil.iter_modules([pkgpath])]
42 | if module_name in module_name_list and module_name in name:
43 | if verbose: print( 'importing single module: {} from namespace: {} module_name_list: {}'.format( name, namespace.__name__, module_name_list ) )
44 | module = importlib.import_module(name)
45 | if verbose: print( 'import module: {}'.format( module ) )
46 | if reload_module and module is not None: # importlib.reload is imported at the top of this file for py3 compatibility.
47 | reload(module)
48 | pkgpath = os.path.dirname(namespace.__file__)
49 | plugin_modules.extend( [ name for _, name, _ in pkgutil.iter_modules([pkgpath])] )
50 | if verbose: print('namespace: {} module names: {}'.format( namespace.__name__, plugin_modules ) )
51 | return plugin_modules
52 |
53 | def load_plugins(module_name=None, verbose=debug_default, reload_module=False): # Load all plugins and returns the list of user plugins. if module_name is provided only loads a specific module
54 | plugin_modules=load_namespace(firehawk.plugins, module_name=module_name, verbose=verbose, reload_module=reload_module)
55 | api_modules=load_namespace(firehawk.api, module_name=module_name, verbose=verbose, reload_module=reload_module)
56 | return plugin_modules, api_modules
57 |
58 | def module_package(module_name, verbose=debug_default, prefer='plugin', only=None): # returns the package that contains a module, preferring a plugin if it exists over an api module. You can enforce loading an api module with only='api'.
59 | if enforce_only is not None: # An environment variable FH_SKIP_PLUGINS can be used to disable all plugins and test default behaviour.
60 | only = enforce_only
61 | no_ref, api_modules = load_plugins(module_name=module_name, verbose=verbose)
62 | else:
63 | plugin_modules, api_modules = load_plugins(module_name=module_name, verbose=verbose)
64 | package = None
65 | if only is None:
66 | if prefer == 'plugin':
67 | if module_name in plugin_modules:
68 | if verbose: print('...firehawk.plugins contains: {}.'.format( module_name ))
69 | package = firehawk.plugins
70 | elif module_name in api_modules:
71 | if verbose: print('...firehawk.api contains: {}.'.format( module_name ))
72 | package = firehawk.api
73 | elif prefer == 'api':
74 | if module_name in api_modules:
75 | if verbose: print('...firehawk.api contains: {}.'.format( module_name ))
76 | package = firehawk.api
77 | elif module_name in plugin_modules:
78 | if verbose: print('...firehawk.plugins contains: {}.'.format( module_name ))
79 | package = firehawk.plugins
80 | elif only == 'api' and module_name in api_modules:
81 | if verbose: print('...firehawk.api contains: {}.'.format( module_name ))
82 | package = firehawk.api
83 | elif only == 'plugin' and module_name in plugin_modules:
84 | if verbose: print('...firehawk.plugins contains: {}.'.format( module_name ))
85 | package = firehawk.plugins
86 |
87 | if package is None:
88 | print( 'ERROR acquiring package for module: {} prefer: {} only: {}'.format(module_name, prefer, only) )
89 |
90 | return package
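91 |
92 | # A minimal usage sketch (mirrors how other modules in this repo resolve packages):
93 | # import firehawk_plugin_loader
94 | # pdgkvstore = firehawk_plugin_loader.module_package('pdgkvstore').pdgkvstore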
--------------------------------------------------------------------------------
/scripts/modules/TestPreTask.py:
--------------------------------------------------------------------------------
1 | # This Deadline pre-task script is used by the deadline scheduler in TOPs. It allows the hip path being loaded to be overwritten, to optimise for remote locations.
2 |
3 |
4 | import os
5 | import sys
6 | import traceback
7 | import json
8 |
9 | from Deadline.Scripting import *
10 | from Deadline.Plugins import *
11 |
12 |
13 | def append_hlibs_to_sys(hfs):
14 | """
15 | Add the $HFS/bin and $HFS/houdini/python2.7libs directories to sys path
16 | and os.environ['PATH']
17 | """
18 | hfsbin = hfs + "/bin"
19 | hlibs = hfs + "/houdini/python2.7libs"
20 |
21 | hfsbin = hfsbin.encode('ascii', 'ignore')
22 | hlibs = hlibs.encode('ascii', 'ignore')
23 |
24 | # This is required to load the DLLs
25 | sys.path.append(hfsbin)
26 | sys.path.append(hlibs)
27 | print(sys.path)
28 |
29 | path_combine = ':'
30 | if sys.platform == 'win32':
31 | path_combine = ';'
32 |
33 | # This is required to load the pdg module
34 | os.environ['PATH'] = os.environ['PATH'] + path_combine + hfsbin + path_combine + hlibs
35 |
36 |
37 |
38 | def __main__( *args ):
39 | deadlinePlugin = args[0]
40 | job = deadlinePlugin.GetJob()
41 |
42 | deadlinePlugin.LogInfo("In Pre Task")
43 |
44 | # Get HFS
45 | hfs_env = job.GetJobEnvironmentKeyValue('HFS')
46 | deadlinePlugin.LogInfo("got hfs env")
47 | if not hfs_env:
48 | deadlinePlugin.LogWarning('$HFS not found in job environment.')
49 | return 0
50 |
51 | # Evaluate it locally for this machine
52 | hfs_env = RepositoryUtils.CheckPathMapping(hfs_env)
53 |
54 | # Append Houdini bin and python2.7libs folders
55 | append_hlibs_to_sys(hfs_env)
56 |
57 |
58 | # The task index (corresponds to the task file)
59 | startFrame = deadlinePlugin.GetStartFrame()
60 | # deadlinePlugin.LogInfo("got start frame")
61 |
62 | startupDir = deadlinePlugin.GetStartupDirectory()
63 |
64 | # The PDG job directory will contain the task file
65 | jobDir = deadlinePlugin.GetPluginInfoEntryWithDefault('PDGJobDirectory', '')
66 | if not jobDir:
67 | deadlinePlugin.FailRender('PDGJobDirectory is not specified. Unable to get task file.')
68 |
69 | taskFilePath = os.path.join(jobDir, 'task_{}.txt'.format(startFrame))
70 |
71 |
72 | deadlinePlugin.LogInfo('Looking for task file: {}'.format(taskFilePath))
73 |
74 | # Wait until the task file has been synchronized. The file is written by the submission machine, and this waits until it's available
75 | # in the mounted directory.
76 | # This file contains all the data to execute the work item.
77 | line = deadlinePlugin.WaitForCommandFile(taskFilePath, False, deadlinePlugin.taskFileTimeout)
78 |
79 | deadlinePlugin.LogInfo('Found task file: {}'.format(taskFilePath))
80 |
81 | if not line:
82 | deadlinePlugin.FailRender('Task file not found at {}'.format(taskFilePath))
83 |
84 | executable = None
85 | arguments = ''
86 |
87 | try:
88 | # Load the task file's data as json dict and process properties
89 |
90 | json_obj = json.loads(line)
91 | deadlinePlugin.LogInfo('loaded JSON OBJ')
92 |
93 | executable = RepositoryUtils.CheckPathMapping(json_obj['executable'].replace( "\"", "" ))
94 | arguments = RepositoryUtils.CheckPathMapping(json_obj['arguments'])
95 |
96 | # ### Alter work item command path ###
97 | command = arguments
98 |
99 | if '.hip' in command.lower():
100 | prod_root = os.environ["PROD_ROOT"]
101 | prod_onsite_root = os.environ["PROD_ONSITE_ROOT"]
102 | prod_cloud_root = os.environ["PROD_CLOUD_ROOT"]
103 |
104 | def convert_path(item, target_base_path):
105 | if item.startswith(prod_root):
106 | result = "{base}"+item[len(prod_root):]
107 | elif item.startswith(prod_onsite_root):
108 | result = "{base}"+item[len(prod_onsite_root):]
109 | elif item.startswith(prod_cloud_root):
110 | result = "{base}"+item[len(prod_cloud_root):]
111 | else:
112 | deadlinePlugin.LogInfo('no path match to convert path')
113 | return item # no known prefix matched; return the path unchanged rather than raising a TypeError on None
114 | result = str(target_base_path+result[len("{base}"):])
115 | return result
116 |
117 |
118 | command_list = command.split(' ')
119 | new_command_list = []
120 | for item in command_list:
121 | if '.hip' in item.lower():
122 | item = convert_path(item, prod_root)
123 | new_command_list.append(item)
124 |
125 | command = ' '.join(new_command_list)
126 | deadlinePlugin.LogInfo('command updated:')
127 | json_obj['arguments'] = command
128 |
129 | arguments = RepositoryUtils.CheckPathMapping(json_obj['arguments'])
130 |
131 | with open(taskFilePath, 'w') as outfile:
132 | json.dump(json_obj, outfile)
133 |
134 | deadlinePlugin.LogInfo('dump json data to file: {}'.format(taskFilePath))
135 |
136 | deadlinePlugin.LogInfo('Task Executable: %s' % executable)
137 | deadlinePlugin.LogInfo('Task Arguments: %s' % arguments)
138 |
139 |
140 |
141 | except:
142 | deadlinePlugin.FailRender('Unable to parse task file as json\n\t {}'.format(traceback.format_exc(1)))
143 |
144 |
145 |
146 | # You can update the hip file in the arguments, then write it back into the task file at taskFilePath.
147 | # Then the PDGDeadline plugin will again load this file and execute the task.
148 |
149 | deadlinePlugin.LogInfo("Finished Test Pre Task!")
150 |
--------------------------------------------------------------------------------
/scripts/menus/OPmenu.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/pdg/types/firehawkschedulers/firehawklocal.py:
--------------------------------------------------------------------------------
1 |
2 | from __future__ import print_function
3 |
4 | #
5 | # Copyright (c) <2020> Side Effects Software Inc.
6 | #
7 | # Permission is hereby granted, free of charge, to any person obtaining a copy
8 | # of this software and associated documentation files (the "Software"), to deal
9 | # in the Software without restriction, including without limitation the rights
10 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 | # copies of the Software, and to permit persons to whom the Software is
12 | # furnished to do so, subject to the following conditions:
13 | #
14 | # The above copyright notice and this permission notice shall be included in all
15 | # copies or substantial portions of the Software.
16 | #
17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
23 | # SOFTWARE.
24 |
25 | # NAME: firehawklocal.py ( Python )
26 |
27 | # COMMENTS: Defines a local scheduler implementation that runs work item
28 | # commands asynchronously. The commands run in separate
29 | # processes, up to some specified maximum process count.
30 |
31 |
32 |
33 | ### Do not edit these imports ###
34 |
35 | import json
36 | import os
37 | import shlex
38 | import signal
39 | import subprocess
40 | import sys
41 | import time
42 | import threading
43 | import traceback
44 | from distutils.spawn import find_executable
45 | import six.moves.urllib.parse as parse
46 |
47 | from pdg import Scheduler, scheduleResult, tickResult, ServiceError
48 | from pdg import TypeRegistry
49 | from pdg.job.callbackserver import CallbackServerMixin
50 | from pdg.scheduler import PyScheduler, convertEnvMapToUTF8
51 | from pdg.staticcook import StaticCookMixin
52 | from pdg.utils import expand_vars, print_headless
53 |
54 | ### Do not edit imports above this line. This serves as a baseline for future updates to the localscheduler ###
55 |
56 | import logging
57 | from collections import namedtuple
58 | import re
59 | from pdg import CookError
60 |
61 | sys.path.append(os.path.expandvars("$HFS/houdini/pdg/types/schedulers")) # TODO: Try and remove this if it can be done without producing errors.
62 |
63 | from local import LocalScheduler
64 |
65 | import pdg
66 | import firehawk_submit as firehawk_submit # Firehawk presubmission utils for multiparm versioning.
67 | import firehawk_read
68 |
69 | import firehawk_plugin_loader
70 | pdgkvstore = firehawk_plugin_loader.module_package('pdgkvstore').pdgkvstore
71 | houpdgkvstore = firehawk_plugin_loader.module_package('houpdgkvstore').houpdgkvstore
72 |
73 |
74 | firehawk_logger = firehawk_plugin_loader.module_package('submit_logging').submit_logging.FirehawkLogger()
75 | firehawk_logger.timed_info(label='firehawk scheduler loaded')
76 | firehawk_logger.debug('test debug logger')
77 |
78 | from datetime import datetime
79 |
80 | # Replicate the scheduler with a patched method
81 |
82 | class FirehawkLocalScheduler(LocalScheduler):
83 | def onPreSubmitItem(self, work_item, job_env, item_command):
84 | """
85 | Modifies the item command before it gets submitted to be run.
86 | Intended to be used in a user subclass of this scheduler.
87 | """
88 |
89 | ### Firehawk on-schedule version handling: dynamically allocates a version per wedge, stored in a multiparm db.
90 | submitObject = firehawk_submit.submit( debug=self._verbose )
91 | item_command = submitObject.onPreSubmitItem(work_item, job_env, item_command, logger_object=firehawk_logger) # optionally attach a logger object here
92 | if item_command is None:
93 | raise CookError( 'Failed: onPreSubmitItem. item_command: {}'.format(item_command) )
94 |
95 | work_item.setCommand( item_command )
96 |
97 | # could cause instability
98 |
99 | kwargs = {}
100 | required_components = ['job', 'seq', 'shot', 'element', 'variant'] # store kv for the work item permanently, to acquire logs if cooked from cache / or just generated.
101 | for key in required_components:
102 | value = firehawk_read.getLiveParmOrAttribValue(work_item, key, type='string')
103 | kv = { key: value }
104 | firehawk_logger.debug( 'update kv: {}'.format( kv ) )
105 | kwargs.update( kv )
106 |
107 | kwargs['version'] = 'v'+str( work_item.intAttribValue('version') ).zfill(3)
108 | kwargs['subindex'] = work_item.batchIndex
109 |
110 | firehawk_logger.debug('kvstore kwargs: {}'.format( kwargs ))
111 | if all( [ kwargs.get(x) is not None for x in required_components ] ): # If all components resolved to values, stash job data in our kv store.
112 | key = '{}/{}/{}/{}/{}/{}/{}'.format( kwargs['job'], kwargs['seq'], kwargs['shot'], kwargs['element'], kwargs['variant'], kwargs['version'], kwargs['subindex'] )
113 | value = {
114 | 'log_uri': { 'value': self.getLogURI(work_item), 'type': 'string'}
115 | }
116 | firehawk_logger.debug('write: {}'.format( value ) )
117 | pdgkvstore.work_item_db_put(key, value, graph=work_item.graph) # write value to disk, or a database if available.
118 |
119 | self._verboseLog("End presubmit")
120 |
121 | return item_command
122 |
123 | # getting a log can be customised to be based on an output path, making log acquisition from past cooks possible
124 |
125 | # def getLogURI(self, work_item):
126 | # if work_item.intAttribValue('updated_itemcommand'): # The work item has been scheduled in this session.
127 | # log_path = '{}/logs/{}.log'.format(self.tempDir(True), work_item.name)
128 | # uri = 'file://' + log_path
129 | # else: # we need to acquire the uri from our kv store
130 | # json_object = houpdgkvstore.getPdgKvStore(work_item) # attempt to retrieve work item data from kv store if attribs are not available.
131 | # if not isinstance(json_object, dict) or len(json_object)==0: # Couldn't retrieve a kvstore for the work item,
132 | # return ''
133 | # required_attribs = ['log_uri']
134 | # if not isinstance(json_object, dict) or not all( [ x in json_object for x in required_attribs ] ):
135 | # return '' # it wasn't possible to retrieve any valid data.
136 | # uri = json_object['log_uri']['value']
137 | # return uri+'?file/firehawk/log'
138 |
139 | def registerTypes(type_registry):
140 | firehawk_logger.info("Init firehawklocalscheduler schedulers")
141 |
142 |
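143 | # If scheduler registration is needed here (rather than being handled by the wrapping HDA),
144 | # a minimal sketch inside registerTypes could be, assuming pdg's TypeRegistry.registerScheduler:
145 | # type_registry.registerScheduler(FirehawkLocalScheduler)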
--------------------------------------------------------------------------------
/pdg/types/firehawkschedulers/custom_handlers.py:
--------------------------------------------------------------------------------
1 | # Custom handlers allow us to control whether cache items exist / are marked dirty and cooking should occur. In our case, if the path schema changes via an upstream node, or if the output path on the target has been wiped, we need to force a cook.
2 | # If the work item thinks it's already cooked and the path doesn't change on the node directly, it wouldn't catch this case, so we force a cook here to handle it.
3 |
4 | import os, pdg, hou
5 | import firehawk_submit as firehawk_submit
6 | import firehawk_read
7 |
8 | import firehawk_plugin_loader
9 | debug_default = firehawk_plugin_loader.resolve_debug_default()
10 | firehawk_logger = firehawk_plugin_loader.module_package('submit_logging').submit_logging.FirehawkLogger(debug=debug_default)
11 |
12 | def simple_handler(local_path, raw_file, work_item):
13 | firehawk_logger.debug(local_path)
14 | return pdg.cacheResult.Skip
15 |
16 | def custom_handler(local_path, raw_file, work_item):
17 | # firehawk_logger.timed_info(label='custom_handlers.py: timed_info')
18 | firehawk_logger.debug('')
19 | firehawk_logger.debug('### CUSTOM CACHE HANDLER ### {}'.format( work_item.name ) )
20 | firehawk_logger.debug('local_path: {}'.format( local_path ) )
21 | # Skip work items that have custom caching disabled.
22 | # if firehawk_submit.submit().getLiveParmOrAttribValue(work_item, 'use_custom_caching') == 0:
23 | # return pdg.cacheResult.Skip
24 | def endMessage():
25 | firehawk_logger.debug( '### CUSTOM CACHE HANDLER END ###' )
26 | try:
27 | set_output = None
28 | if int( os.getenv('PDG_USE_CUSTOM_EXPRESSION_HANDLER', '1') ) and not work_item.isNoGenerate: # This functionality can be disabled if it is suspected of causing a hang. SESI state this may be fixed after H18.5.430
29 | firehawk_logger.timed_debug( '...Use custom logic to see if the path schema has changed. Ensures the expression is correct, not just the file on disk.' )
30 | with work_item.makeActive():
31 | firehawk_logger.timed_debug('get hou node path')
32 | hou_node_path = firehawk_read.get_hou_node_path(work_item)
33 | hou_node = hou.node(hou_node_path)
34 | # set_output = None
35 | firehawk_logger.timed_debug('read set_out')
36 | set_output = work_item.stringAttribValue('set_output')
37 |
38 | if set_output is None:
39 | firehawk_logger.warning('set_output is None. Ensure work_item.stringAttribValue(\'set_output\') is set correctly')
40 | endMessage()
41 | return pdg.cacheResult.Miss
42 |
43 | set_output = firehawk_read.resolve_pdg_vars(set_output, work_item=work_item)
44 | firehawk_logger.timed_debug('set_output: {}'.format(set_output) )
45 | # set_output = firehawk_read.getLiveParmOrAttribValue(work_item, 'set_output', debug=debug) # set_output is the unexpanded expression that should be used on the target.
46 | # get_output = None
47 |
48 | # Node: /obj/sop_geo_process/topnet1/outputprep2/pythonprocessor1 parm: set_output with tags: {'format': u'bgeo.sc', 'res': u'1920_1080_bgeo.sc', 'asset_type': u'geocache', 'job': u'stereo', 'volatile': u'off', 'create_asset': False, 'animating_frames': u'$F4', 'use_inputs_as': 'channels'}
49 | firehawk_logger.timed_debug('get output')
50 | get_output = firehawk_read.get_output(hou_node, work_item=work_item, set_output=set_output, debug=debug_default) # get_output is the current expression on the target. If they don't match, the work item must be queued.
51 | firehawk_logger.timed_debug('get output: {}'.format(get_output) )
52 | # set_index_key = None
53 | firehawk_logger.timed_debug('read index key')
54 | set_index_key = work_item.stringAttribValue('index_key_unexpanded')
55 | firehawk_logger.timed_debug('get index_key_expr')
56 | # set_index_key = firehawk_read.getLiveParmOrAttribValue(work_item, 'index_key', debug=debug)
57 | # index_key_expr = None
58 | index_key_expr = firehawk_read.get_output_index_key_expr(hou_node, debug=debug_default)
59 | firehawk_logger.timed_debug('done acquisition')
60 |
61 | if (set_output is not None) and (get_output is None):
62 | firehawk_logger.timed_debug( 'Result: Miss. Output on target is not yet set. get_output: {} set_output: {}'.format( get_output, set_output ) )
63 | endMessage()
64 | return pdg.cacheResult.Miss
65 |
66 | if (set_index_key is not None) and (index_key_expr is None):
67 | firehawk_logger.timed_debug( 'Result: Miss. index_key on target is not yet set.' )
68 | endMessage()
69 | return pdg.cacheResult.Miss
70 |
71 | if (set_output is not None) and (get_output is not None) and len(set_output)>0 and (get_output != set_output):
72 | firehawk_logger.timed_debug( 'Result: Miss. Output schema not matching current output parm. Will Cook.' )
73 | endMessage()
74 | return pdg.cacheResult.Miss
75 |
76 | if (set_index_key is not None) and (index_key_expr is not None) and len(set_index_key)>0 and (index_key_expr != set_index_key):
77 | firehawk_logger.timed_debug( 'Result: Miss. Output index_key not matching current index_key parm. Will Cook.' )
78 | endMessage()
79 | return pdg.cacheResult.Miss
80 |
81 | # The checks above force a cook when the unexpanded strings do not match, since the schema has changed. Now verify the file actually exists on disk.
82 | if not os.path.isfile(local_path):
83 | firehawk_logger.timed_debug( 'Result: Miss. no file: {}'.format( local_path ) )
84 | endMessage()
85 | return pdg.cacheResult.Miss
86 |
87 | firehawk_logger.timed_debug( 'Check if file has no size on disk {}'.format( local_path ) )
88 |
89 | if local_path and os.stat(local_path).st_size == 0:
90 | firehawk_logger.timed_debug( 'Result: Miss. no file / file with size 0' )
91 | endMessage()
92 | return pdg.cacheResult.Miss
93 |
94 | firehawk_logger.timed_debug( 'Result: Hit.' )
95 | endMessage()
96 | return pdg.cacheResult.Hit
97 | except Exception as e:
98 | print("### EXCEPTION ### ERROR:")
99 | print(str(e))
100 | print( 'Result: Miss.' )
101 | endMessage()
102 | return pdg.cacheResult.Miss
103 |
104 | # exceptions = []
105 | # tag_list = pdg.TypeRegistry.types().tags
106 |
107 | # def registerTypes(type_registry):
108 | # for tag in tag_list:
109 | # if tag in exceptions:
110 | # print('Simple handler for tag {}'.format(tag))
111 | # type_registry.registerCacheHandler(tag, simple_handler)
112 | # else:
113 | # print('Custom handler for tag {}'.format(tag))
114 | # type_registry.registerCacheHandler(tag, custom_handler)
115 |
116 |
--------------------------------------------------------------------------------
/scripts/modules/README:
--------------------------------------------------------------------------------
1 |
2 | Firehawk dynamic versions allows a JSON file or PDG attribute to define the set of versions that should be present on a multiparm for any Houdini node. It will also create the multiparm if it is not present.
3 |
4 | Minimum Usage:
5 |
6 | - a dictionary with entries defining versions (see below for more information on this):
7 | json_object[ version_db_hou_node.path() ][ 'version_'+index_key ] = str(aquired_version)
8 | - entries for the output parms expressions used to write and read files.
9 | json_object[ version_db_hou_node.path() ][ 'parm_'+parm_name ] = my_expression
10 | where my_expression uses unexpanded strings like:
11 | /prod/`chs("seq")`/`chs("shot")`/components/`chs("element")`/geoCache/`chs("variant")`/`chs("version_str")`/bgeo.sc/stereo_`chs("seq")`_`chs("shot")`_`chs("element")`_`chs("variant")`_`chs("version_str")`.$F4.bgeo.sc
12 | - user data on '/' that defines the path 'last_submitted_hip_file' intended for the farm
13 | - The above json_object data is dumped to a .json file matching the name of 'last_submitted_hip_file'
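
A minimal sketch of the Minimum Usage steps above (the node path, index key, expression, and sidecar path are hypothetical values):

    import json
    from collections import defaultdict

    json_object = defaultdict(dict)
    node_path = '/obj/geo1/rop_geometry1'       # hypothetical version_db_hou_node.path()
    index_key = 'seq010.shot020.flip.wedge0'    # 'seq.shot.element.variant'
    json_object[node_path]['version_' + index_key] = str(12)
    json_object[node_path]['parm_sopoutput'] = '/prod/`chs("seq")`/`chs("shot")`/components/`chs("element")`/geoCache/`chs("variant")`/`chs("version_str")`/bgeo.sc/stereo.$F4.bgeo.sc'

    # the sidecar name matches the 'last_submitted_hip_file' user data, .hip replaced with .json
    with open('/jobs/submitted/my_shot.1612345678.json', 'w') as versiondb_file:
        json.dump(json_object, versiondb_file)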
14 |
15 | Reading the dictionary from a PDG work item attribute allows it to be updated for each work item progressively throughout a submission. It would also be possible to read an environment variable to avoid disk access if not using PDG. The disk file should always be written even if not used, since it is needed for file recovery and consistency of the versions if the hip is reused.
16 |
17 | If the dict is read off disk, a limitation is that it should only be written once per submission, before it is loaded by the intended hip file. It should have the same name as the hip file being submitted to the farm (replacing .hip with .json) and exist in the same folder. It is a sidecar file intended for that hip file's use, which is the reason for the naming convention; one won't work without the other.
18 |
19 | The included 456.py in this repository shows how the file can be pulled on hip load. pull_all_versions_to_all_multiparms will look for the json file that matches the current hip path stored as user data 'last_submitted_hip_file' on '/'.
20 |
21 | If 'last_submitted_hip_file' user data has been set, then it is possible for the current user's hip file to also load the data, not just a hip file intended for the renderfarm, ensuring consistency.
22 |
23 | If you intend to update the versions multiple times throughout a submission, but only submit one hip file, and not use a pdg work item to define the dictionary, you would instead want the json dictionary to be a base64 encoded environment variable. This is not currently supported, but would be fairly trivial to implement.
24 |
25 | Users can make alterations to versions in their hip file that won't be replaced: so long as the last json file has been loaded, any changes made after that point are preserved. The json file only defines changes to the required parms, and they are only applied if the last MD5 recorded for a dictionary doesn't match the incoming json dictionary.
26 | When versions or other parameter replacements are pulled from the json file, an MD5 hash of that dictionary is stored on '/' as user data which allows us to verify if the data was pulled, and avoid repeated application of the version set.
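
A minimal sketch of that check (the user data key and the apply_overrides helper are hypothetical names):

    import hashlib, json
    import hou

    incoming_md5 = hashlib.md5( json.dumps(json_object, sort_keys=True).encode('utf-8') ).hexdigest()
    if hou.node('/').userData('versiondb_md5') != incoming_md5:  # hypothetical user data key
        apply_overrides(json_object)                             # hypothetical: set multiparm versions / parm expressions
        hou.node('/').setUserData('versiondb_md5', incoming_md5) # record the pull so it isn't re-applied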
27 |
28 | See the end of firehawk_submit.py for how we write the json file structure. We should probably write basic methods to manage the json_dict structure going forward and keep them in firehawk_dynamic_versions.py
29 |
30 | example json dict entry:
31 | json_object[ version_db_hou_node.path() ][ 'version_'+index_key ] = str(aquired_version)
32 |
33 | Where aquired_version is an int and index_key is a string of the form:
34 | 'seq.shot.element.variant'
35 |
36 | version_db_hou_node is the node intended to store the data, usually a rop geometry node or similar. It is possible for the node that stores the data as user data to be different from where the multiparm resides for reasons in PDG, but generally assume these nodes should be the same.
37 |
38 | The data is written to disk on the line:
39 | json.dump( json_object, versiondb_file )
40 |
41 | How can wedges define what is written / read?
42 | Per wedge variations:
43 | The dictionary can also define other direct parm value replacements:
44 | json_object[ hou_node.path() ][ 'parm_'+parm_name ] = value
45 |
46 | We use this to update the index key expression on the node. The value of the index key resolves the values for the seq, shot, element, variant, and version parms at the top of the spare parm folder for each node.
47 |
48 | In firehawk submit we use:
49 | self.persistent_override( hou_node_path=hou_node_path, parm_name='version_db_index_key', value=index_key, existence_check=False )
50 | but this should probably become a separate function in firehawk_dynamic_versions...
51 |
52 | This expression is unique for each wedge. It could also be avoided by getting the expressions that resolve the result (seq, shot, element, variant, and version) to look at an environment variable. If not using PDG, I'd recommend this expression use environment variables.
53 | '$WEDGE_SEQ.$WEDGE_SHOT.$WEDGE_ELEMENT.$WEDGE_VARIANT'
54 | This would be instead of the pdg equivalent:
55 | '`@seq`.`@shot`.`@element`.`@variant`'
56 |
57 | And then in submission, ensure those env vars are set for the wedge. A helper tool can also be used to help users set the env vars they wish to use.
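
For example, a submission wrapper might export (all values here are hypothetical):

    import os
    os.environ['WEDGE_SEQ'] = 'seq010'
    os.environ['WEDGE_SHOT'] = 'shot020'
    os.environ['WEDGE_ELEMENT'] = 'flip'
    os.environ['WEDGE_VARIANT'] = 'wedge0'
    # the node's index_key expression '$WEDGE_SEQ.$WEDGE_SHOT.$WEDGE_ELEMENT.$WEDGE_VARIANT' then resolves per wedge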
58 |
59 | Output parm expression:
60 | So with that you have the values that can resolve uniquely for each wedge. We also need to define the expression for how that is used:
61 | self.persistent_override( hou_node_path=hou_node_path, parm_name=output_parm_name, value=set_output )
62 |
63 | set_output is the defined unexpanded string that should resolve to a path on disk:
64 | /prod/`chs("seq")`/`chs("shot")`/components/`chs("element")`/geoCache/`chs("variant")`/`chs("version_str")`/bgeo.sc/stereo_`chs("seq")`_`chs("shot")`_`chs("element")`_`chs("variant")`_`chs("version_str")`.$F4.bgeo.sc
65 |
66 | For an example of how PDG can use an input schema, see the output_prep top node, which constructs this attribute from a schema provided externally; you are free to construct it however you wish.
67 | This expression shouldn't have to change per wedge, but it must exist as a minimum, so this parm override is always defined for any node that will write output.
68 |
69 | How to use a single hip file for multiple submissions:
70 | When PDG is used, each time the hip file is loaded, a work item attribute specifies the unique value of index_key to be used, which resolves the relevant value for any output parm and version on a node. If not using PDG, and you want to use this tool set for more than just tracking submission history (i.e. you want to actually set output paths for each wedge while rendering a single hip too), then you will need to construct a method so that each wedge inherits the index key as an environment variable. It will need to be set in the environment, and you will need to set the index_key on the node to resolve env vars instead of PDG attributes.
71 | Ideally you want to place as many known versions and parms as possible in the dictionary before a submission occurs, but it would also be possible to store the json object as an env var, updating it as you go throughout the submission.
72 |
73 | The json object should contain all accumulated changes made in a submission. This makes it possible for a dependency to inherit all wedges if they are available, not just the specific wedge being submitted.
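
For example, after a three-wedge submission the accumulated dict might contain (node path and keys hypothetical):

    json_object['/obj/geo1/rop_geometry1']['version_seq010.shot020.flip.wedge0'] = '12'
    json_object['/obj/geo1/rop_geometry1']['version_seq010.shot020.flip.wedge1'] = '12'
    json_object['/obj/geo1/rop_geometry1']['version_seq010.shot020.flip.wedge2'] = '12'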
--------------------------------------------------------------------------------
/pdg/types/firehawkschedulers/firehawktbdeadline.py:
--------------------------------------------------------------------------------
1 | #
2 | # Copyright (c) <2020> Side Effects Software Inc.
3 | #
4 | # Permission is hereby granted, free of charge, to any person obtaining a copy
5 | # of this software and associated documentation files (the "Software"), to deal
6 | # in the Software without restriction, including without limitation the rights
7 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8 | # copies of the Software, and to permit persons to whom the Software is
9 | # furnished to do so, subject to the following conditions:
10 | #
11 | # The above copyright notice and this permission notice shall be included in all
12 | # copies or substantial portions of the Software.
13 | #
14 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
20 | # SOFTWARE.
21 | #
22 | # NAME: tbdeadline.py ( Python )
23 | #
24 | # COMMENTS: Defines a Thinkbox Deadline scheduler implementation for PDG.
25 | # This module depends on the deadline commandline
26 | # which is installed with the Deadline client, for example:
27 | # %DEADLINE_PATH%\deadlinecommand.exe
28 | #
29 | # To use this module you must ensure that DEADLINE_PATH is set
30 | # in the environment. This also requires the custom PDGDeadline
31 | # plugin for Deadline that is shipped with Houdini. The plugin
32 | # can be found at $HFS/houdini/pdg/plugins
33 | #
34 | # Currently supports both new and old MQ.
35 | # New MQ path uses the ServiceManager to share MQ use.
36 | # Supports MQ on farm, local, or connecting to existing.
37 | #
38 |
39 | import datetime
40 | import os
41 | import sys
42 | import shutil
43 | import time
44 | import shlex
45 | import re
46 | import json
47 | import traceback
48 | import socket
49 | import threading
50 | import six
51 | from threading import Thread
52 | from collections import deque
53 |
54 | import six.moves.xmlrpc_client as xmlrpclib
55 | from six.moves.queue import Queue, Empty
56 | import six.moves.urllib.parse as parse
57 | from six.moves import zip
58 |
59 | from pdg import attribFlag
60 | from pdg import serviceState, scheduleResult, tickResult, CookError, ServiceError
61 | from pdg import TypeRegistry
62 | from pdg.job.eventdispatch import EventDispatchMixin
63 | from pdg.job.mqrelay import MQRelay
64 | from pdg.job.callbackserver import CallbackServer
65 | from pdg.scheduler import PyScheduler
66 |
67 | import pdgd
68 |
69 | from . import tbdeadline_utils as tbut
70 | from pdg.utils.mq import MQUtility, MQInfo, MQState, MQUsage, MQSchedulerMixin
71 |
72 | from pdgutils import PDGNetMQRelay, mqGetError, mqCreateMessage, PDGNetMessageType
73 |
74 | ### Do not edit imports above this line. This serves as a baseline for future updates to the localscheduler ###
75 |
76 | import logging
77 | from collections import namedtuple
78 | import re
79 | from pdg import CookError
80 |
81 | sys.path.append(os.path.expandvars("$HFS/houdini/pdg/types/schedulers")) # TODO: Try and remove this if it can be done without producing errors.
82 |
83 | from tbdeadline import DeadlineScheduler
84 |
85 | import pdg
86 | import firehawk_submit as firehawk_submit # Firehawk presubmission utils for multiparm versioning.
87 | import firehawk_read
88 |
89 | import firehawk_plugin_loader
90 | pdgkvstore = firehawk_plugin_loader.module_package('pdgkvstore').pdgkvstore
91 | houpdgkvstore = firehawk_plugin_loader.module_package('houpdgkvstore').houpdgkvstore
92 |
93 | import firehawk_plugin_loader
94 | firehawk_logger = firehawk_plugin_loader.module_package('submit_logging').submit_logging.FirehawkLogger()
95 | firehawk_logger.timed_info(label='firehawk scheduler loaded')
96 | firehawk_logger.debug('test debug logger')
97 |
98 | from datetime import datetime
99 |
100 | # Replicate the scheduler with a patched method
101 |
102 | class FirehawkDeadlineScheduler(DeadlineScheduler):
103 | """
104 | Modified Firehawk Scheduler implementation for Thinkbox's Deadline scheduler.
105 | """
106 | def onPreSubmitItem(self, work_item, job_env, item_command):
107 | """
108 | Modifies the item command before it gets submitted to be run.
109 | Intended to be used in a user subclass of this scheduler.
110 | """
111 |
112 | ### Firehawk on schedule version handling dynamically allows a version per wedge to store as a multiparm db.
113 | submitObject = firehawk_submit.submit( debug=self._verbose )
114 | item_command = submitObject.onPreSubmitItem(work_item, job_env, item_command, logger_object=firehawk_logger) # optionally attach a logger object here
115 | if item_command is None:
116 | raise CookError( 'Failed: onPreSubmitItem. item_command: {}'.format(item_command) )
117 |
118 | work_item.setCommand( item_command )
119 |
120 | kwargs = {}
121 | required_components = ['job', 'seq', 'shot', 'element', 'variant'] # store kv for work item permanently to acquire logs if cooked from cache / or just generated.
122 | for key in required_components:
123 | value = firehawk_read.getLiveParmOrAttribValue(work_item, key, type='string')
124 | kv = { key: value }
125 | firehawk_logger.debug( 'update kv: {}'.format( kv ) )
126 | kwargs.update( kv )
127 |
128 | kwargs['version'] = 'v'+str( work_item.intAttribValue('version') ).zfill(3)
129 | kwargs['subindex'] = work_item.batchIndex
130 |
131 | firehawk_logger.debug('kvstore kwargs: {}'.format( kwargs ))
132 | if all( [ x in kwargs for x in required_components ] ): # If all components are available, then stash job data in our kv store.
133 | key = '{}/{}/{}/{}/{}/{}/{}'.format( kwargs['job'], kwargs['seq'], kwargs['shot'], kwargs['element'], kwargs['variant'], kwargs['version'], kwargs['subindex'] )
134 | value = {
135 | 'log_uri': { 'value': self.getLogURI(work_item), 'type': 'string'}
136 | }
137 | firehawk_logger.debug('write: {}'.format( value ) )
138 | pdgkvstore.work_item_db_put(key, value, graph=work_item.graph) # write value to disk, or a database if available.
139 |
140 | self._verboseLog("End presubmit")
141 |
142 | return item_command
143 |
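# A minimal sketch of the user subclass the docstring above refers to (the class name and
# extra flag are hypothetical; the override delegates to this implementation first):
#
#   class MyStudioScheduler(FirehawkDeadlineScheduler):
#       def onPreSubmitItem(self, work_item, job_env, item_command):
#           item_command = super(MyStudioScheduler, self).onPreSubmitItem(work_item, job_env, item_command)
#           return item_command + ' --studio-flag'  # hypothetical extra argument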
144 | def _scheduleTaskForWorkItem(self, work_item, local_job_dir):
145 | """
146 | Writes out the task file for the given work item, and
147 | append it to the pending tasks queue for submission.
148 | """
149 | new_task_id = self.next_task_id
150 | submit_as_job = False
151 |
152 | self.onPreSubmitItem(work_item, work_item.environment, work_item.command) # update the work item to handle autoversioning and optionally modify the work_item command
153 |
154 | self._writeTaskFile(new_task_id, work_item, local_job_dir, work_item.command,
155 | work_item.name, work_item.index, work_item.node, submit_as_job)
156 | self.next_task_id = self.next_task_id + 1
157 |
158 | # Add to pending queue to process in update
159 | self.pending_tasks_queue.append((new_task_id, work_item))
160 |
161 | def registerTypes(type_registry):
162 | firehawk_logger.info("Init firehawkdeadlinescheduler schedulers")
--------------------------------------------------------------------------------
/scripts/modules/firehawk/api/output_prep.py:
--------------------------------------------------------------------------------
1 | import os
2 | import traceback
3 | import firehawk_plugin_loader
4 | firehawk_logger = firehawk_plugin_loader.module_package('submit_logging').submit_logging.FirehawkLogger()
5 |
6 | firehawk_logger.timed_info(label='output_prep plugin loaded')
7 |
8 | ### IMPORTANT: DO NOT IMPORT HOU HERE.
9 |
10 | def parm_format_menu_list():
11 | result = [ 'bgeo.sc', "Bgeo (Blosc)", # default should be item zero; string refs to defaults can cause errors due to a bug.
12 | 'bgeo.gz', "Bgeo (Zipped)",
13 | 'bgeo', "Bgeo",
14 | 'obj', "Obj",
15 | 'abc', "MeshCache (abc)",
16 | 'sim', "Dop Sim",
17 | 'simdata', "Dop Simdata",
18 | 'pc', "Pointcloud (pc)",
19 | 'abc', 'Alembic',
20 | 'vdb', 'OpenVDB',
21 | 'vdbpoints', 'OpenVDB Points',
22 | 'ass', 'Arnold Ass File',
23 | 'exr', 'Exr',
24 | 'jpg', 'Jpg',
25 | 'mp4', 'Mp4',
26 | 'ip', 'Ip (Interactive Mplay)',
27 | 'hip', 'Hip',
28 | 'py', 'Py'
29 | ]
30 | return result
31 |
32 | def parm_asset_type_menu_list():
33 | result = [ 'geocache', "Geo Cache", # Default should be item 0, normally used for houdini caching with bgeo.sc
34 | 'meshcache', "Mesh Cache",
35 | 'pointcache', "Point Cache",
36 | 'volumecache', "Volume Cache",
37 | 'rendering', "Rendering",
38 | 'renderarchive', "Render Archive",
39 | 'setup', "Setup",
40 | 'script', "Script"
41 | ]
42 | return result
43 |
44 | def prepare_output_type(work_item, output_topnode_path, output_topnode_type_name, output_format):
45 | if output_format == "mp4":
46 | work_item.setStringAttrib('rop', output_topnode_path) # onscheduled callback still needs to consider the output for versioning.
47 | work_item.setStringAttrib('outputparm', 'outputfilepath')
48 | # file_name_result used to be set to the asset path. this may be required again in the future. TODO: validate this.
49 | elif len(output_topnode_type_name) > 0 and output_topnode_type_name == 'ropcomposite':
50 | work_item.setStringAttrib('rop', output_topnode_path)
51 | work_item.setStringAttrib('outputparm', 'copoutput')
52 |
53 | # {"/obj/sop_geo_process/topnet1/ropfetch2": {"output_topnode_type_name": "ropfetch", "override_origin_path": "/obj/sop_geo_process/topnet1/outputprep2/pythonprocessor1", "override_target_type_name": "ropfetch", "output_topnode_path": "/obj/sop_geo_process/topnet1/ropfetch2", "child_rops": [], "override_target": "/obj/sop_geo_process/topnet1/ropfetch2"}}
54 |
55 | # this module might be crashing on import hou ?
56 | def update_workitems(pdg_node, item_holder, upstream_items, generation_type, eval_output_expr_once=True):
57 | # update_workitems is used to attach submission data required for auto versioning, asset creation, and scheduler customisation.
58 | firehawk_logger.info('\nupdate_workitems')
59 |
60 | # pdg_node - A reference to the current pdg.Node instance
61 | # item_holder - A pdg.WorkItemHolder for constructing and adding work items
62 | # upstream_items - The list of work items in the node above, or empty list if there are no inputs
63 | # generation_type - The type of generation, e.g. pdg.generationType.Static, Dynamic, or Regenerate
64 | # eval_output_expr_once - since the output expression can apply to all output, it may only need to be evaluated once. evaluation of this for every work item could be very slow since it may involve a request to some db or schema once per item.
65 |
66 | firehawk_logger.timed_info(label='update_workitems: start')
67 | import pdg, json
68 | firehawk_logger.timed_info( label='update_workitems: iter node_list:' )
69 |
70 | try:
71 | output_nodes_kv_str = str( pdg_node.parameter("output_nodes_kv").evaluate() )
72 | output_nodes_kv = json.loads( output_nodes_kv_str )
73 | except Exception: # avoid a bare except so KeyboardInterrupt/SystemExit still propagate
74 | print('ERROR: Could not get json from string. pdg_node: {}'.format( pdg_node ) )
75 | output_nodes_kv = {}
76 |
77 | if output_nodes_kv is None or len(output_nodes_kv) == 0:
78 | return
79 |
80 | node_list = [ output_nodes_kv[x]['output_topnode_path'] for x in output_nodes_kv ]
81 |
82 | if len(node_list) == 0:
83 | raise pdg.CookError("Must specify a downstream top node to submit")
84 |
85 | keys = []
86 | pydict = {}
87 | for k,v in output_nodes_kv.items():
88 | keys.append(k) # list keys
89 |
90 | # This data is intended for downstream
91 | override_target = v["override_target"]
92 | pydict[override_target] = {}
93 | pydict[override_target]["override_origin_path"] = v["override_origin_path"]
94 | pydict[override_target]["pdg_node_name"] = pdg_node.name
95 |
96 | output_topnode_path = output_nodes_kv[ keys[0] ]["output_topnode_path"] # presently the rop attribute only supports one item. This method should potentially be relocated to the onschedule callback to support multiple nodes.
97 | output_topnode_type_name = output_nodes_kv[ keys[0] ]["output_topnode_type_name"]
98 | output_format = pdg_node.parameter('format').evaluateString()
99 | set_output = None
100 | index_key_unexpanded = pdg_node.parameter('index_key').evaluateString()
101 |
102 | asset_type = pdg_node.parameter('asset_type').evaluateString()
103 |
104 | render_asset_types = [ 'renderarchive', 'rendering' ]
105 | resolution = None
106 | if asset_type in render_asset_types: # Add resolution if we are rendering.
107 | resolution = pdg_node.parameter('resolution').evaluateInt()
108 |
109 | for item in upstream_items:
110 | firehawk_logger.timed_info( label='update_workitems: start item: {}'.format( item ) )
111 | options = pdg.WorkItemOptions()
112 | options.cloneMode = pdg.cloneMode.Always
113 | options.cloneTarget = item
114 | options.cloneResultData = True
115 | options.inProcess = True
116 | options.parent = item
117 | work_item = item_holder.addWorkItem(options) # clone upstream items
118 |
119 | firehawk_logger.timed_info( label='update_workitems: iter parms: {}'.format( item ) )
120 |
121 | work_item.addAttrib('overrides', pdg.attribType.PyObject)
122 | work_item.setPyObjectAttrib('overrides', pydict) # set the attribute
123 |
124 | firehawk_logger.timed_info( label='update_workitems: done iter node list: {}'.format( item ) )
125 |
126 | # An output node can record many output paths (wedges/shots/assets). The index key determines what should be currently read from or written to. If the index key doesn't exist, a new entry is created.
127 | # index_key_unexpanded = pdg_node['index_key'].evaluateString(work_item)
128 | work_item.setStringAttrib('index_key_unexpanded', index_key_unexpanded)
129 |
130 | try:
131 | firehawk_logger.timed_info( label='update_workitems: make active' )
132 |
133 | with work_item.makeActive():
134 | if eval_output_expr_once:
135 | if set_output is None:
136 | set_output = pdg_node.parameter('set_output').evaluateString() # output parm uses in this case a constant expression that we only need to eval once. it doesn't vary per work item. This operation is expensive.
137 | else:
138 | set_output = pdg_node.parameter('set_output').evaluateString() # evaluating this as a unique value for every work item is not recommended, this logic may take a long time to do so!
139 |
140 | index_key_expanded = pdg_node['index_key_expanded'].evaluateString()
141 |
142 | work_item.setStringAttrib('set_output', set_output)
143 |
144 | firehawk_logger.timed_info( label='update_workitems: set index key: {}'.format( item ) )
145 |
146 | work_item.setStringAttrib('index_key', index_key_expanded) # precompute the index key for submission. it is ephemeral, should not be used beyond the next cook node.
147 | work_item.setStringAttrib('index_key_expanded', index_key_expanded)
148 |
149 | firehawk_logger.timed_info( label='update_workitems: done set index key: {}'.format( item ) )
150 | if resolution is not None:
151 | work_item.setIntAttrib('resolution', resolution)
152 |
153 | work_item.setIntAttrib('onScheduleVersioned', 0) # This attribute is updated if items get scheduled to track asset creation.
154 | firehawk_logger.timed_info( label='update_workitems: prepare_output_type: {}'.format( item ) )
155 | prepare_output_type(work_item, output_topnode_path, output_topnode_type_name, output_format)
156 | firehawk_logger.timed_info( label='update_workitems: end item: {}'.format( item ) )
157 | except Exception as e:
158 | msg = '\n\noutput_prep.py failed while constructing workitem:\n{}'.format( traceback.format_exc() ) # format_exc() takes no exception argument; it formats the active exception
159 | raise Exception( msg )
160 | #raise pdg.CookError("output_prep.py failed while constructing workitem")
161 |
162 | firehawk_logger.info('update_workitems done.\n\n')
--------------------------------------------------------------------------------
/scripts/modules/firehawk/api/create_asset.py:
--------------------------------------------------------------------------------
1 | # firehawk.api: This plugin creates an asset from a work item
2 |
3 | import os
4 | import re
5 | import traceback
6 |
7 | import firehawk_plugin_loader
8 | submit_logging = firehawk_plugin_loader.module_package('submit_logging').submit_logging
9 | debug_default = submit_logging.resolve_debug_default()
10 | if debug_default <= 9: debug_default = 0 # disable logging of asset creation if FH_VAR_DEBUG_PDG env var is below 10
11 | firehawk_logger = submit_logging.FirehawkLogger(debug=debug_default)
12 |
13 | firehawk_logger.timed_info(label='create_asset plugin loaded')
14 | firehawk_logger.debug('test debug logger')
15 |
16 | from os.path import sep, join
17 | def pjoin(*args, **kwargs):
18 | return join(*args, **kwargs).replace(sep, '/') # for windows compatibility.
19 |
20 | def add_dependency(path, ancestor_path): # this is an asset dependency db hook, not execution dependency
21 | return
22 |
23 | def rebuild_tags(tags): # A method that can be customised to rebuild / alter tags used for asset creation. For example, an output type of 'rendering' may require an extension tag to be customised. This method can be patched to do so.
24 | return tags
25 |
26 | def get_tags_for_submission_hip(hip_name, element='pdg_setup', variant='', parent_top_net=None): # a hip file for submission can have its own asset if needed
27 | if parent_top_net is None:
28 | raise Exception('ERROR: parent_top_net not provided: get_tags_for_submission_hip()')
29 |
30 | job_value = parent_top_net.parm('job').evalAsString()
31 | seq_value = parent_top_net.parm('seq').evalAsString()
32 | shot_value = parent_top_net.parm('shot').evalAsString()
33 |
34 | if False in [ len(x)>0 for x in [ job_value, seq_value, shot_value ] ]:
35 | raise Exception('ERROR: job, seq, shot parm not set on topnet: get_tags_for_submission_hip()')
36 |
37 | tags = {
38 | 'job': job_value,
39 | 'seq': seq_value,
40 | 'shot': shot_value,
41 | 'element': element,
42 | 'variant': variant,
43 | 'asset_type': 'setup',
44 | 'format': 'hip',
45 | 'version': -1, # When -1, the asset function will auto increment the version.
46 | 'volatile': False,
47 | 'pdg_dir': os.path.dirname( hip_name ) # pdg_dir is used by default for asset creation, but can be ignored if you create a custom asset method.
48 | }
49 | return tags
50 |
51 | def returnExtension(**tags): # some formats are not literal extensions, so we extract those here.
52 | if tags['format'] == 'vdbpoints':
53 | tags['extension'] = 'vdb' # path creation for vdb points needs to be remapped
54 | else:
55 | tags['extension'] = tags['format']
56 | return tags['extension']
57 |
58 | def rebuildTagsForFileName(**tags):
59 | if 'dir_name' not in tags.keys(): tags['dir_name'] = None
60 | tags['extension'] = returnExtension(**tags)
61 |
62 | if 'format' in tags.keys() and 'dir_name' in tags.keys(): # append the format into the path if it is known
63 | tags['dir_name'] = pjoin(tags['dir_name'], tags['extension'])
64 |
65 | if tags['asset_type']=='setup':
66 | tags['file_name'] = '{}_{}_{}_{}_{}_{}.{}'.format(tags['job'], tags['seq'], tags['shot'], tags['element'], tags['variant'], tags['version_str'], tags['extension'])
67 | elif 'animating_frames' in tags.keys() and 'format' in tags.keys(): # if format and animating_frames ('single' or '$F4') are provided, then a file name can also be returned'
68 | tags['file_name'] = '{}_{}_{}_{}_{}_{}.{}.{}'.format(tags['job'], tags['seq'], tags['shot'], tags['element'], tags['variant'], tags['version_str'], tags['animating_frames'], tags['extension'])
69 | else:
70 | tags['file_name'] = None
71 |
72 | firehawk_logger.debug( "tags['dir_name'] : {}, tags['file_name'] {}".format( tags['dir_name'], tags['file_name'] ) )
73 |
74 | return tags
75 |
76 | def returnFileName(**tags): # format's the provided tags and version into a file name. The extension is also a subfolder under the asset path.
77 | tags = rebuildTagsForFileName(**tags)
78 | return tags['dir_name'], tags['file_name']
79 |
80 | def _ensure_dir_exists(dir_name):
81 | if not os.path.exists(dir_name):
82 | try:
83 | os.makedirs(dir_name)
84 | except:
85 | firehawk_logger.warning('Could not create path: {} Check permissions.'.format( dir_name ) )
86 | return
87 |
88 | def _create_asset(tags, auto_version=True, version_str=None, create_dirs=True): # This example function creates an asset by simply making a new directory, but it is also possible to replace this with the ability to request a new asset from a db/server. The method can also return a path without creating a directory. This method should not be referenced externally. It can also return an existing path defined by tags with create_dirs=False
89 | firehawk_logger.debug('_create_asset:')
90 |
91 | if 'pdg_dir' not in tags: # if PDG_DIR was not resolved, we will not be creating an asset.
92 | create_dirs = False
93 | pdg_dir = '__PDG_DIR__'
94 | else:
95 | pdg_dir = tags['pdg_dir']
96 |
97 | default_prod_root = pjoin( os.path.normpath( pdg_dir ) , 'output' ) # if pdg dir is not in tags, resolve standard houdini placeholder
98 |
99 | prod_root = os.getenv('PROD_ROOT', default_prod_root) # the env var PROD_ROOT can override an absolute output path.
100 |
101 | dir_name = pjoin(prod_root, tags['job'], tags['seq'], tags['shot'], tags['element'], tags['variant'], tags['asset_type']) # the base path before the version folder
102 | firehawk_logger.debug('_create_asset: {}'.format(dir_name))
103 | if create_dirs: _ensure_dir_exists(dir_name)
104 |
105 | if auto_version: # acquire the next version for output and create the directory.
106 |
107 | contained_dirs = os.listdir(dir_name)
108 |
109 | firehawk_logger.debug('contained_dirs: {}'.format( contained_dirs ))
110 | version_int_list = [ version_str_to_int(x, silent=True) for x in contained_dirs if version_str_to_int(x, silent=True) is not None ] # get all versions in dir
111 | if len( version_int_list ) > 0:
112 | latest_current_version = max( version_int_list )
113 | else:
114 | latest_current_version = 0
115 | version_str = 'v'+str( latest_current_version+1 ).zfill(3)
116 |
117 | elif version_str is None:
118 | raise Exception('ERROR: auto_version is false but no version_str provided.')
119 |
120 | dir_name = pjoin(dir_name, version_str) # the full path with the version
121 | if create_dirs: _ensure_dir_exists(dir_name)
122 |
123 | return dir_name, version_str
124 |
125 |
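# A minimal sketch of what _create_asset resolves (tags and disk state hypothetical):
# with auto_version=True and an existing '.../geocache/v003' on disk, it returns
# ('.../job/seq010/shot020/flip/wedge0/geocache/v004', 'v004'); with auto_version=False
# and version_str='v002', it returns the v002 path without scanning the directory.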
126 |
127 | def get_requirements():
128 | return ['job', 'seq', 'shot', 'element', 'variant', 'asset_type', 'volatile' ]
129 |
130 | def getAssetPath(**tags): # This function should return a path and filename for an asset with the tags dict input provided.
131 | firehawk_logger.debug('get_requirements(): {}'.format( get_requirements() ) )
132 |
133 | requirements = tags.get('requirements', get_requirements())
134 | firehawk_logger.debug( 'Requirements to getAssetPath: {}'.format( tags ) )
135 |
136 | for key in requirements:
137 | if key not in tags:
138 | firehawk_logger.warning( 'Error: Missing key: {}'.format(key) )
139 | elif tags[key] is None:
140 | firehawk_logger.warning( 'Error: Key: {} Value is: {}'.format(key, None) )
141 |
142 | tags['dir_name'], tags['file_name'] = None, None
143 | if tags['volatile']=='on': tags['volatile']=True
144 | if tags['volatile']=='off': tags['volatile']=False
145 |
146 | firehawk_logger.debug( 'getAssetPath with tags: {}'.format(tags) )
147 |
148 | _create_asset = firehawk_plugin_loader.module_package('create_asset').create_asset._create_asset
149 |
150 | if 'version_str' in tags and tags['version_str'] is not None:
151 | firehawk_logger.debug( 'getAssetPath with specified version - createAssetsFromArguments' )
152 | tags['dir_name'], tags['version_str'] = _create_asset( tags, auto_version=False, version_str=tags['version_str'], create_dirs=False )
153 | else:
154 | firehawk_logger.debug( 'getAssetPath default first version - createAssetsFromArguments' )
155 | tags['dir_name'], tags['version_str'] = _create_asset( tags, auto_version=False, version_str='v001', create_dirs=False )
156 |
157 | tags['dir_name'], tags['file_name'] = returnFileName(**tags) # Join the resulting file name onto the asset dir path.
158 |
159 | firehawk_logger.debug( 'getAssetPath returned dir_name: {} file_name: {}'.format( tags['dir_name'], tags['file_name'] ) )
160 | return tags['dir_name'], tags['file_name']
161 |
162 | def createAssetPath(**tags): # This method should create an asset if version_str (eg 'v005') is provided, or increment an asset if version_str is None. It can be patched with whatever method you wish, so long as it returns the same output. It must return dir_name, file_name and version (as an int)
163 | firehawk_logger.debug( 'api: create_asset tags: {}'.format(tags) )
164 | requirements = ['job', 'seq', 'shot', 'element', 'variant', 'asset_type', 'volatile']
165 | for key in requirements:
166 | if key not in tags:
167 | firehawk_logger.warning( 'ERROR: Missing key {}'.format(key) )
168 |
169 | tags['dir_name'], tags['file_name'] = None, None
170 | if tags['volatile']=='on': tags['volatile']=True
171 | if tags['volatile']=='off': tags['volatile']=False
172 |
173 | firehawk_logger.debug( 'createAssetsFromArguments with tags: {}'.format(tags) )
174 |
175 | try:
176 | if 'version_str' in tags and tags['version_str'] is not None: # When version_str is provided, use that and don't auto increment. Otherwise, request the server to auto inc the version.
177 | firehawk_logger.debug( 'Create new asset with specified version_str: {}'.format(tags['version_str']) )
178 | tags['dir_name'], tags['version_str'] = _create_asset( tags, auto_version=False, version_str=tags['version_str'] )
179 | else:
180 | firehawk_logger.debug( 'Create new asset with incremented version from latest' )
181 | tags['dir_name'], tags['version_str'] = _create_asset( tags, auto_version=True )
182 |
183 | if 'hip' in tags:
184 | hip_asset_path = os.path.dirname( tags['hip'] )
185 | # hook to register that a hip file created an asset.
186 |
187 | except ( Exception ) as e :
188 | msg = 'ERROR: During createAssetPath. Tags used: {}'.format( tags )
189 | print( msg )
190 | firehawk_logger.warning( msg )
191 | traceback.print_exc() # print_exc() formats the active exception; it does not accept the exception or tags as arguments
192 | firehawk_logger.debug( 'Tags at failure: {}'.format( tags ) )
193 | raise e
194 | # return None
195 |
196 | firehawk_logger.debug( 'Created dir_name: {} asset_version_str: {}'.format( tags['dir_name'], tags['version_str'] ) )
197 |
198 | tags['version_int'] = version_str_to_int(tags['version_str'])
199 | tags['dir_name'], tags['file_name'] = returnFileName(**tags) # this will also update the base dir for the asset.
200 | tags['extension'] = returnExtension(**tags)
201 |
202 | return tags['dir_name'], tags['file_name'], tags['version_int']
203 |
204 | def version_str_to_int(version_str, silent=False):
205 | match = re.match(r"^(v)([0-9]+)$", version_str, re.I)
206 |
207 | if match is None:
208 | if not silent: firehawk_logger.warning( 'version_str_to_int: invalid version string: {}'.format( version_str ) )
209 | return
210 |
211 | version_int = int(match.groups()[1])
212 | return version_int
213 |
214 | def version_int_to_str(version_int):
215 | version_str = 'v'+str( version_int ).zfill(3)
216 | return version_str
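
# Example round trips for the two helpers above (sketch):
#   version_int_to_str(7)                      -> 'v007'
#   version_str_to_int('v007')                 -> 7
#   version_str_to_int('backup', silent=True)  -> None (non-version directory names are skipped)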
--------------------------------------------------------------------------------
/scripts/modules/firehawk_asset_handler.py:
--------------------------------------------------------------------------------
1 | # This Python file provides the ability to patch in different methods for asset creation when cooking.
2 |
3 | # We should avoid any references to hou (even PDG potentially), to provide compatibility with out-of-process and in-process work items. We want to enable use on other platforms.
4 |
5 | # Originally this was the purpose of firehawk_submit and the origins of the code base but depending on the environment may need to be customised.
6 |
7 | # Because PDG doesn't know if it needs to request new asset paths until evaluation of a graph (using cache handlers per work item), asset creation is even less of a trivial exercise that can just occur before submission.
8 | # This outlines the steps that occur in this flow and what we now have a need to consider in PDG land:
9 |
10 | # Hip files should be saved once for submission; custom values for the submission must be updated on subsequent loads in a render queue.
11 | # On submit, we timestamp a saved file, stash it on user data at '/' and ensure the hip attrib on any work item will use this. When a file loads, in 456.py we can tell if it was intended for farm use and it should check if any dynamic values will need to be updated. This is also applied if a user manually loads that file, since its state would still not be current. Wedge override attribs, while meant to help with this efficiency problem, are too ephemeral to be useful by themselves in this scenario. We must track the data more persistently for production between hip loads. Top graphs can't be trusted to preserve data from previous cooks easily on hip load, and we also want sop nets to be able to read files independent of tops - we use a combo of multiparms and JSON sidecar files.
12 | # Before a cook, output parms on nodes (defining file paths) should be defined by expressions that can resolve to real paths if possible from prior cooks. The expressions must be set before evaluation of any related work item, since if files exist at a resolved path, cooking should be skipped because the item is regarded as cached. An exception is if a cook occurs upstream; then we should regard what is downstream as requiring updating and cook it.
13 | # If a cook will occur, we may need to request an asset is created, either with a specific version the user has set, or a version provided back to us from an asset server / other method - that could be as simple as incrementing a number from an existing dir tree. Consider as well the asset may already be created, but the dir is empty. Not fun!
14 | # When any new asset / version is retrieved, the path has changed, so we track that update by writing a side car JSON file which will be used to override a value later when the hip file is loaded.
15 | # Finally the work item is submitted to the queue!
16 | # When the timestamped hip file is loaded on a render node, we use a 456.py file to check for any sidecar JSON files in the same path with the same basename.
17 | # If any JSON files are found, we update our multiparms accordingly with version data as current as possible for the entire submission tree (we need versions upstream of the current node too). This allows a loaded hip file to always at least have the current wedge version, and if dependencies are managed appropriately, it is also possible for the loaded hip file to have access to all submitted wedge versions in instances where all that data must be merged.
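#
# A minimal sketch of the hip-load side of this flow (helper signature hypothetical; see
# 456.py in this repository for the real entry point):
#
#   import os, json
#   import hou
#   last_hip = hou.node('/').userData('last_submitted_hip_file')
#   if last_hip:
#       sidecar = last_hip.replace('.hip', '.json')
#       if os.path.isfile(sidecar):
#           with open(sidecar) as f:
#               overrides = json.load(f)
#           pull_all_versions_to_all_multiparms(overrides)  # hypothetical signature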
18 |
19 | import os
20 | import time
21 | import re
22 | import traceback
23 |
24 | import firehawk_plugin_loader
25 | create_asset_module = firehawk_plugin_loader.module_package('create_asset').create_asset
26 |
27 | debug_default = firehawk_plugin_loader.resolve_debug_default()
28 |
29 | from os.path import sep, join
30 | def pjoin(*args, **kwargs):
31 | return join(*args, **kwargs).replace(sep, '/') # for windows compatibility.
32 |
33 | class asset():
34 | def __init__( self,
35 | debug=debug_default,
36 | logger_object=firehawk_plugin_loader.module_package('submit_logging').submit_logging.FirehawkLogger(),
37 | start_time=None
38 | ):
39 | self.debug = debug
40 | self.dynamic_input_keys = ['seq','shot','element','variant','version_str'] # see custom_create_new_asset_version for more info on how to use these for asset creation.
41 |
42 | self.start_time = start_time # Start time is used to continue tracking of a log time
43 | self.last_time = None
44 |
45 | self.logger_object = logger_object
46 |
47 |
48 | def timeLog(self, start_time=None, label=''): # provides time passed since start time and time since last running of this method.
49 | if start_time is not None:
50 | self.start_time = start_time
51 | else:
52 | start_time = self.start_time
53 | if self.last_time is None:
54 | self.last_time = start_time
55 |
56 | message = "--- {} seconds --- Passed during Pre Submit --- {} seconds --- {}".format( '%.4f' % (time.time() - start_time), '%.4f' % (time.time() - self.last_time), label )
57 | self.infoLog( message )
58 |
59 | self.last_time = time.time()
60 |
61 | def debugLog(self, message):
62 | if self.logger_object is not None and hasattr( self.logger_object, 'debug' ):
63 | self.logger_object.debug( message )
64 | else:
65 | if self.debug>=10: print( message )
66 |
67 | def infoLog(self, message):
68 | if self.logger_object is not None and hasattr( self.logger_object, 'info' ):
69 | self.logger_object.info( message )
70 | else:
71 | if self.debug>=5: print( message )
72 |
73 | def warningLog(self, message):
74 | if self.logger_object is not None and hasattr( self.logger_object, 'warning' ):
75 | self.logger_object.warning( message )
76 | else:
77 | print( message )
78 |
79 | def getAssetPath(self, **tags): # This method should return a path and filename for an asset with the tags dict input provided.
80 | dir_name, file_name = create_asset_module.getAssetPath(**tags)
81 | return dir_name, file_name
82 |
83 | def custom_create_new_asset_version( self, tags, silent_errors=False, show_times=False, register_hip=True ):
84 | # This method should be able to:
85 | # - produce an expression for rop outputs (these use channel refs). This is an output path expression that will be applied to a node.
86 | # - request creation of new asset versions when tags have explicit values.
87 | # When both are evaluated in a houdini session they should be able to resolve to the same path on disk for a work item.
88 |
89 | self.debugLog('custom_create_new_asset_version()')
90 |
91 | if tags['format'] == 'ip': # If format is set to ip (interactive), then we set it on the output of the rop to load images directly into mplay. We shouldn't be creating any asset in this instance
92 | tags['asset_path'] = 'ip'
93 |
94 | tags = create_asset_module.rebuild_tags( tags ) # This custom method can be used to rebuild tags with reasonable defaults.
95 |
96 | if tags is None:
97 | self.warningLog( 'ERROR: No tags defined: {}'.format( tags ) )
98 | return None
99 | self.debugLog('tags: {}'.format( tags ))
100 |
101 | # getting all required parms
102 |
103 | self.debugLog( "Getting all required parms from tags: {}".format( tags ) )
104 |
105 | create_asset = tags.get('create_asset', True)
106 | if not isinstance( create_asset, bool ):
107 | raise Exception('create_asset tag value is not type bool')
108 | if create_asset != True:
109 | self.debugLog( "create_asset disabled for this workitem in tags: {}".format( tags ) )
110 |
111 | scheduled = True # return failed if this is occurring within a scheduler
112 | if 'scheduled' in tags: scheduled = tags['scheduled'] # scheduled assets only upversion once.
113 |
114 | version = None
115 | version_int = None
116 | version_str = None
117 |
118 | # dynamic_input_keys defined on init are the components of the expression that may change on cook of a work item and can be patched if required. That would commonly be the sequence, shot, element name, the wedge number or some other variant in the path, and the version.
119 | # We usually need to convert these to an expression on the node that channel refs the parm (varying by the wedge). tags['use_inputs_as'] == 'channels'
120 | # We also may need to pass through a preevaluated value to construct a path as a string. tags['use_inputs_as'] == 'tags'
121 | # self.dynamic_input_keys can be customised by setting it on the object.
122 |
123 | def update_tags( tags, format_str ):
124 | for key in self.dynamic_input_keys:
125 | tags[key] = format_str.format(key)
126 |
127 | if 'use_inputs_as' not in tags: # by default, evaluate path output based on tag values.
128 | tags['use_inputs_as'] = 'tags'
129 |
130 | literal_path = False
131 | if tags['use_inputs_as'] == 'attributes': # PDG attributes will be used to define output paths, generally not a good idea, since these attributes are global.
132 | update_tags( tags, '`@{}`' )
133 | elif tags['use_inputs_as'] == 'channels': # Unexpanded strings can be evaluated later by the rop itself using its own channels
134 | update_tags( tags, '`chs("{}")`' )
135 | elif tags['use_inputs_as'] == 'expressions': # Expressions can be evaluated later by the rop itself using its own channels
136 | update_tags( tags, ' + chs("{}") + ' )
137 | elif tags['use_inputs_as'] == 'tags': # These are using tags as literal values to resolve an actual string / asset name.
138 | literal_path = True
139 | version_int = int( tags['version'] ) # version as an int should be provided and resolves to a triple padded string - version_str will be output
140 | if version_int == -1: # when version is set to -1, it will increment. Otherwise, the input will be used.
141 | tags['version_str'] = None # when version is None, asset will be created with new version.
142 | else:
143 | tags['version_str'] = 'v'+str( version_int ).zfill(3)
144 |
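# A minimal sketch of how the modes above rewrite the dynamic keys (values hypothetical):
#   'channels': tags['seq'] becomes '`chs("seq")`' so the rop's own parms resolve the path at render time
#   'tags':     tags['seq'] stays a literal value like 'seq010' and a real path is resolved now, creating the asset if needed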
145 | asset_path = ''
146 | try :
147 | self.debugLog( "Check if version_str is defined: {}".format( tags.get('version_str', None) ) )
148 | if show_times: self.timeLog(label='Dynamic versioning: Prep tags to query asset path')
149 |
150 | asset_dir, asset_filename = None, None
151 | if create_asset:
152 | self.debugLog( "Ensure Asset exists" )
153 | asset_already_exists = False # Assume we need a new asset.
154 |
155 | if 'version_str' in tags and tags['version_str'] is not None: # (auto version was disabled since a version was provided) we must be creating an asset with a specific version, but check it isn't already present on disk.
156 | self.debugLog('...Using asset with specific version. Check if it exists' )
157 | asset_dir, asset_filename = create_asset_module.getAssetPath( **tags ) # retrieve the theoretical asset path, it may or may not exist, but this is used to test its existence.
158 | asset_already_exists = os.path.isdir(asset_dir) # Check existence
159 |
160 | self.debugLog( "Ensure Asset exists. asset_already_exists: {}".format(asset_already_exists) )
161 | if not asset_already_exists: # create the asset if it doesn't exist, else use the existing path.
162 | self.debugLog('...Create New Version:' )
163 | asset_dir, asset_filename, version_int = create_asset_module.createAssetPath( **tags )
164 | if show_times: self.timeLog(label='Dynamic versioning: Created new asset path')
165 | else:
166 | if show_times: self.timeLog(label='Dynamic versioning: Skipping asset creation. Already Exists')
167 | else: # We must not be wanting to create a new version, just want an existing path.
168 | self.debugLog( "Using an existing path. No new assets need to be created. value for create_asset: {}".format( create_asset ) )
169 | asset_dir, asset_filename = create_asset_module.getAssetPath( **tags )
170 | self.debugLog( "asset_dir: {}".format( asset_dir ) )
171 |
172 | if None not in [ asset_dir, asset_filename ]:
173 | asset_path = pjoin( asset_dir, asset_filename )
174 | else:
175 | self.debugLog('...Asset create did not return either of file name or dir' )
176 |
177 | except ( Exception ) as e :
178 | if silent_errors == False:
179 | import traceback
180 | self.warningLog('ERROR: During acquiring path / creation of asset. Tags used: {}'.format(tags))
181 | traceback.print_exc() # print_exc() formats the active exception; it does not accept the exception or tags as arguments
182 |
183 | raise e
184 | return None
185 |
186 | if literal_path:
187 | create_dir = os.path.dirname(asset_path) # the asset path exists, we just create a sub dir in there.
188 | if not os.path.isdir(create_dir): os.makedirs(create_dir)
189 |
190 | return asset_path, version_int
--------------------------------------------------------------------------------
/hda/Top_firehawklocalscheduler_1.0.0.hda/Top_1firehawklocalscheduler/DialogScript:
--------------------------------------------------------------------------------
1 | # Dialog script for firehawklocalscheduler automatically generated
2 |
3 | {
4 | name firehawklocalscheduler
5 | script firehawklocalscheduler
6 | label "Firehawk Local Scheduler"
7 |
8 | help {
9 | ""
10 | }
11 |
12 | group {
13 | name "folder0"
14 | label "Scheduler"
15 |
16 | groupsimple {
17 | name "folder4"
18 | label "Paths"
19 |
20 | parm {
21 | name "pdg_workingdir"
22 | label "Working Directory"
23 | type directory
24 | default { "$HIP" }
25 | }
26 | parm {
27 | name "pdg_mapmode"
28 | label "Path Mapping"
29 | type ordinal
30 | default { "0" }
31 | menu {
32 | "0" "Global"
33 | "1" "None"
34 | }
35 | parmtag { "script_callback_language" "python" }
36 | }
37 | parm {
38 | name "pdg_usemapzone"
39 | label "Use Path Map Zone"
40 | type toggle
41 | nolabel
42 | joinnext
43 | default { "0" }
44 | disablewhen "{ pdg_mapmode == 1 }"
45 | parmtag { "script_callback_language" "python" }
46 | }
47 | parm {
48 | name "pdg_mapzone"
49 | label "Path Map Zone"
50 | type string
51 | default { "$PDG_PATHMAP_ZONE" }
52 | disablewhen "{ pdg_usemapzone == 0 }"
53 | menureplace {
54 | "POSIX" "POSIX"
55 | "WIN" "WIN"
56 | }
57 | parmtag { "script_callback_language" "python" }
58 | }
59 | parm {
60 | name "pdg_validateoutputs"
61 | label "Validate Outputs When Recooking"
62 | type toggle
63 | default { "1" }
64 | parmtag { "script_callback_language" "python" }
65 | }
66 | parm {
67 | name "pdg_checkexpectedoutputs"
68 | label "Check Expected Outputs on Disk"
69 | type toggle
70 | default { "0" }
71 | parmtag { "script_callback" "" }
72 | parmtag { "script_callback_language" "python" }
73 | }
74 | }
75 |
76 | parm {
77 | name "socketcallbacks"
78 | label "Use Socket callbacks instead of stdout tokens"
79 | type integer
80 | invisible
81 | default { "1" }
82 | range { 0 10 }
83 | }
84 | groupsimple {
85 | name "folder5"
86 | label "Scheduling"
87 |
88 | parm {
89 | name "maxprocsmenu"
90 | label "Total Slots"
91 | type ordinal
92 | joinnext
93 | default { "0" }
94 | menu usetokenvalue {
95 | "0" "Equal to 1/4 of Total CPU Count"
96 | "-1" "Equal to CPU Count Less One"
97 | "1" "Custom Slot Count"
98 | }
99 | }
100 | parm {
101 | name "maxprocs"
102 | label "maxprocs"
103 | type integer
104 | nolabel
105 | default { "1" }
106 | disablewhen "{ maxprocsmenu != 1 }"
107 | range { -4 128 }
108 | parmtag { "autoscope" "0000000000000000" }
109 | }
110 | parm {
111 | name "verbose"
112 | label "Verbose Logging"
113 | type toggle
114 | default { "0" }
115 | parmtag { "script_callback_language" "python" }
116 | }
117 | parm {
118 | name "pdg_waitforfailures"
119 | label "Block on Failed Work Items"
120 | type toggle
121 | default { "0" }
122 | parmtag { "script_callback_language" "python" }
123 | }
124 | }
125 |
126 | groupsimple {
127 | name "folder1"
128 | label "Temp Directory"
129 |
130 | parm {
131 | name "tempdirmenu"
132 | label "Location"
133 | type ordinal
134 | default { "1" }
135 | menu {
136 | "0" "Working Directory"
137 | "1" "Houdini Temp"
138 | "2" "Custom"
139 | }
140 | }
141 | parm {
142 | name "tempdirappendpid"
143 | label "Append PID"
144 | type toggle
145 | default { "1" }
146 | }
147 | parm {
148 | name "tempdircustom"
149 | label "Custom"
150 | type directory
151 | default { "$HOUDINI_TEMP_DIR/$HIPNAME/pdgtemp" }
152 | disablewhen "{ tempdirmenu != 2 }"
153 | }
154 | }
155 |
156 | }
157 |
158 | group {
159 | name "folder0_1"
160 | label "Job Parms"
161 |
162 | groupsimple {
163 | name "folder0_2"
164 | label "Scheduling"
165 |
166 | parm {
167 | name "local_single"
168 | label "Single"
169 | type toggle
170 | default { "0" }
171 | range { 0 10 }
172 | parmtag { "pdg::scheduler" "" }
173 | }
174 | parm {
175 | name "local_is_CPU_number_set"
176 | label "local_is_CPU_number_set"
177 | type toggle
178 | nolabel
179 | joinnext
180 | default { "0" }
181 | range { 0 1 }
182 | parmtag { "pdg::scheduler" "" }
183 | }
184 | parm {
185 | name "local_CPUs_to_use"
186 | label "Slots Per Work Item"
187 | type integer
188 | default { "1" }
189 | disablewhen "{ local_is_CPU_number_set == 0 }"
190 | range { 1! 64 }
191 | parmtag { "pdg::scheduler" "" }
192 | }
193 | groupsimple {
194 | name "folder6"
195 | label "Minimum Available Memory"
196 |
197 | parm {
198 | name "local_useminfreemem"
199 | label "Rule"
200 | type ordinal
201 | default { "0" }
202 | menu {
203 | "0" "No Minimum"
204 | "1" "MB Available"
205 | "2" "Percent Available"
206 | }
207 | parmtag { "pdg::scheduler" "" }
208 | parmtag { "script_callback_language" "python" }
209 | }
210 | parm {
211 | name "local_minfreemem"
212 | label "Minimum MB"
213 | type float
214 | default { "0" }
215 | disablewhen "{ local_useminfreemem != 1 }"
216 | range { 0 128000 }
217 | parmtag { "pdg::scheduler" "" }
218 | parmtag { "script_callback_language" "python" }
219 | }
220 | parm {
221 | name "local_minfreemempct"
222 | label "Minimum Percent"
223 | type float
224 | default { "0" }
225 | disablewhen "{ local_useminfreemem != 2 }"
226 | range { 0 1 }
227 | parmtag { "pdg::scheduler" "" }
228 | parmtag { "script_callback_language" "python" }
229 | }
230 | }
231 |
232 | }
233 |
234 | groupsimple {
235 | name "folder2"
236 | label "Tasks"
237 |
238 | parm {
239 | name "local_echandleby"
240 | label "On Task Failure"
241 | type ordinal
242 | default { "0" }
243 | menu {
244 | "0" "Report Error"
245 | "1" "Report Warning"
246 | "2" "Retry Task"
247 | "3" "Ignore"
248 | }
249 | parmtag { "pdg::scheduler" "" }
250 | parmtag { "script_callback_language" "python" }
251 | }
252 | parm {
253 | name "local_echandleall"
254 | label "Handle All Non Zero"
255 | type toggle
256 | default { "1" }
257 | parmtag { "pdg::scheduler" "" }
258 | parmtag { "script_callback_language" "python" }
259 | }
260 | parm {
261 | name "local_eccustomcode"
262 | label "Exit Code"
263 | type integer
264 | default { "1" }
265 | disablewhen "{ local_echandleall != 0 }"
266 | range { 1 255 }
267 | parmtag { "pdg::scheduler" "" }
268 | parmtag { "script_callback_language" "python" }
269 | }
270 | parm {
271 | name "sepparm"
272 | label "Separator"
273 | type separator
274 | default { "" }
275 | }
276 | parm {
277 | name "local_maximumretries"
278 | label "Maximum Retries"
279 | type integer
280 | default { "3" }
281 | disablewhen "{ local_echandleby != 2 }"
282 | range { 0 10 }
283 | parmtag { "pdg::scheduler" "" }
284 | parmtag { "script_callback_language" "python" }
285 | }
286 | parm {
287 | name "local_addfailcountattr"
288 | label "Retry Count Attribute"
289 | type toggle
290 | joinnext
291 | default { "0" }
292 | disablewhen "{ local_echandleby != 2 }"
293 | parmtag { "pdg::scheduler" "" }
294 | parmtag { "script_callback_language" "python" }
295 | }
296 | parm {
297 | name "local_failcountattr"
298 | label "local_failcountattr"
299 | type string
300 | nolabel
301 | default { "failurecount" }
302 | disablewhen "{ local_addfailcountattr == 0 } { local_echandleby != 2 }"
303 | parmtag { "pdg::scheduler" "" }
304 | parmtag { "script_callback_language" "python" }
305 | }
306 | }
307 |
308 | groupsimple {
309 | name "folder3"
310 | label "Task Environment"
311 |
312 | parm {
313 | name "local_usehoudinimaxthreads"
314 | label "local_usehoudinimaxthreads"
315 | type toggle
316 | nolabel
317 | joinnext
318 | default { "0" }
319 | parmtag { "pdg::scheduler" "" }
320 | }
321 | parm {
322 | name "local_houdinimaxthreads"
323 | label "Houdini Max Threads"
324 | type integer
325 | default { "0" }
326 | disablewhen "{ local_usehoudinimaxthreads == 0 }"
327 | range { 0 10 }
328 | parmtag { "pdg::scheduler" "" }
329 | }
330 | parm {
331 | name "local_requireswindow"
332 | label "Requires GUI Window"
333 | type toggle
334 | default { "0" }
335 | parmtag { "pdg::scheduler" "" }
336 | parmtag { "script_callback_language" "python" }
337 | }
338 | parm {
339 | name "local_envunset"
340 | label "Unset Variables"
341 | type string
342 | default { "" }
343 | parmtag { "pdg::scheduler" "" }
344 | parmtag { "script_callback_language" "python" }
345 | }
346 | multiparm {
347 | name "local_envmulti"
348 | label "Environment Variables"
349 | parmtag { "pdg::nocopy" "" }
350 | parmtag { "script_callback" "" }
351 | parmtag { "script_callback_language" "hscript" }
352 |
353 | parm {
354 | name "local_envname#"
355 | label "Name"
356 | type string
357 | joinnext
358 | default { "" }
359 | parmtag { "pdg::scheduler" "" }
360 | }
361 | parm {
362 | name "local_envvalue#"
363 | label "Value"
364 | type string
365 | default { "" }
366 | parmtag { "pdg::scheduler" "" }
367 | }
368 | }
369 |
370 | }
371 |
372 | }
373 |
374 | }
375 |
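376 | # Note: a minimal sketch of how these scheduler parms can be read back from
377 | # Python with the regular Houdini parm API. The node path below is a
378 | # hypothetical example only:
379 | #
380 | #   import hou
381 | #   sched = hou.node('/tasks/topnet1/localscheduler')
382 | #   on_failure  = sched.parm('local_echandleby').evalAsInt()      # 2 == Retry Task
383 | #   max_retries = sched.parm('local_maximumretries').evalAsInt()
384 | #   env_count   = sched.parm('local_envmulti').evalAsInt()        # multiparm instance count
385 | #   env_names   = [sched.parm('local_envname%d' % (i + 1)).evalAsString() for i in range(env_count)]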
--------------------------------------------------------------------------------
/scripts/modules/firehawk_read.py:
--------------------------------------------------------------------------------
1 | # A lightweight library for multithreaded processes. Multithreaded ops, like the custom cache handlers, benefit from these libraries loading rapidly.
2 |
3 | import sys, os
4 | import re
5 | import hou, pdg
6 | import time
7 |
8 | from os.path import sep, join
9 | def pjoin(*args, **kwargs):
10 | return join(*args, **kwargs).replace(sep, '/') # for windows compatibility.
11 |
12 | parmprefix='firehawk_read'
13 | spacer=''
14 | debug=0
15 | silent_errors=False
16 |
17 | version_prefix='version_'
18 | def get_version_prefix():
19 | return version_prefix
20 |
21 | def _verboseLog(args): # This is taken from scheduler.py
22 | _verbose=True
23 | if _verbose:
24 | print('{}: {}: {}{}'.format( time.strftime('%H:%M:%S', time.localtime()), parmprefix, spacer, args))
25 | sys.stdout.flush()
26 |
27 | def debugLog(message, debug=debug):
28 | if debug>=10: _verboseLog( message )
29 |
30 | def warningLog(message, debug=debug): # NOTE: currently identical to debugLog; warnings are only printed when debug >= 10.
31 | if debug>=10: _verboseLog( message )
32 |
33 | def get_is_exempt_from_hou_node_path(work_item):
34 | exempt = False
35 | if work_item.isNoGenerate:
36 | print('\nExempt: isNoGenerate')
37 | exempt = True
38 | if work_item.node.topNode().type().name() in [ 'pythonscript', 'pythonprocessor' ]:
39 | print('\nExempt: pythonscript or pythonprocessor')
40 | exempt = True
41 | if exempt: print('No hou node because the work item is exempt from this requirement\n')
42 | return exempt
43 |
44 | def get_hou_node_path(work_item, debug=debug):
45 | if get_is_exempt_from_hou_node_path(work_item):
46 | return
47 | hou_node_path = None
48 | work_item_node_type_name = work_item.node.topNode().type().name()
49 | accepted_list = [ 'ropfetch', 'houdiniserver' ]
50 | if work_item_node_type_name not in accepted_list: # Currently, a hou node path and versioning apply only to nodes being cooked by nodes in this list. We should detect when that is not the case, since the attributes do not use the NoCopy limited scope and can pollute the network downstream.
51 | print('No hou node because the work item is not from {}. work_item_node_type_name: {}'.format( accepted_list, work_item_node_type_name ) )
52 | return
53 |
54 | if work_item_node_type_name == 'houdiniserver':
55 | hou_node_path = work_item.node.topNode().path()
56 |
57 | if work_item_node_type_name == 'ropfetch':
58 | rop_path = work_item.data.stringData('rop', 0)
59 | top_path = work_item.data.stringData('top', 0)
60 | if top_path is not None and len(top_path) and top_path in rop_path: # and hou.node(top_path): # calls to hou may be unstable.
61 | hou_node_path = top_path # If the rop is nested inside the top, apply versioning to the top node itself.
62 | else:
63 | hou_node_path = rop_path # else, apply versioning to the target, eg: rop geometry in sops.
64 |
65 | debugLog('done: {}'.format(hou_node_path), debug=debug)
66 | return hou_node_path
67 |
68 | def get_type_name(node):
69 | type_name = None
70 | if hasattr(node, 'type'):
71 | type_name = node.type().name().split('::')[0]
72 | return type_name
73 |
74 | def get_parent_top_net(parent): # takes the first parent; recurses from here until the top net is found
75 | topnet_type_names = ['topnetmgr', 'topnet']
76 |
77 | ### Get the top net for the current work item to acquire data - for preflight and post.
78 | parent_type_name = get_type_name(parent)
79 | top_net = None
80 | if parent_type_name in topnet_type_names:
81 | top_net = parent
82 | while parent and ( get_type_name(parent) not in topnet_type_names ) :
83 | debugLog( 'get parent for: {}'.format( parent.path() ) )
84 | parent = parent.parent()
85 | parent_type_name = get_type_name(parent)
86 | if parent_type_name in topnet_type_names:
87 | top_net = parent
88 |
89 | return top_net
90 |
91 | def get_version_str(version_int):
92 | version_str = 'v'+str( version_int ).zfill(3)
93 | return version_str
94 |
95 | def get_output(hou_node, work_item=None, set_output=None, output_parm_name=None, debug=debug):
96 | # The output path parm must be set.
97 | result = None
98 | if set_output is None: set_output = getLiveParmOrAttribValue(work_item, 'set_output', debug=debug)
99 | if set_output:
100 | debugLog('...get string data', debug=debug)
101 | if output_parm_name is None:
102 |
103 | output_parm_name = work_item.data.stringData('outputparm', 0)
104 | debugLog('...get string data done', debug=debug)
105 | result = hou_node.parm(output_parm_name).unexpandedString()
106 | debugLog('...get unexpanded string data done', debug=debug)
107 | if len(result)==0: result = None
108 |
109 | return result
110 |
111 | def resolve_pdg_vars(path, work_item=None, node_path=None):
112 |
113 | if path is not None and '__PDG_DIR__' in path:
114 | debugLog( '__PDG_DIR__ in: {}'.format( path ) )
115 | pdg_dir = None
116 |
117 | if work_item is None and node_path is None:
118 | raise Exception('resolve_pdg_vars: requires either work_item or node_path')
119 |
120 | # if work_item is not None and hasattr(work_item, 'environment') and 'PDG_DIR' in work_item.environment:
121 | # pdg_dir = work_item.environment['PDG_DIR']
122 | if 'PDG_DIR' in os.environ:
123 | pdg_dir = os.environ['PDG_DIR']
124 | elif work_item is not None:
125 | import pdg
126 | pdg_dir = work_item.node.scheduler.workingDir(True)
127 | elif node_path is not None:
128 | import hou
129 | pdg_dir = hou.node(node_path).userData( 'workingdir_local' )
130 | # elif 'workingdir_local' in user_data_dict: # we can resolve the working dir for the local session as well.
131 | # pdg_dir = user_data_dict['workingdir_local']
132 |
133 | debugLog( 'PDG_DIR: {}'.format(pdg_dir) )
134 |
135 | if pdg_dir is not None and path is not None:
136 | path = path.replace( '__PDG_DIR__', pdg_dir )
137 | debugLog( 'result path: {}'.format( path ) )
138 | else:
139 | print('WARNING: No PDG_DIR found')
140 |
141 | if path is not None: # windows compatibility
142 | path = pjoin( os.path.normpath( path ) )
143 |
144 | return path
145 |
146 | def get_output_index_key_expr(hou_node, debug=debug):
147 | version_db_hou_node_path = get_version_db_hou_node_path( hou_node_path=hou_node.path() ) # the version db may not reside on the output node.
148 | index_key_parm=hou.node(version_db_hou_node_path).parm('version_db_index_key')
149 | result = None
150 | if index_key_parm: # the index key parm may not exist on the node at first use
151 | result = index_key_parm.unexpandedString()
152 | if len(result)==0: result = None
153 | return result
154 |
155 | def getLiveParmOrAttribValue(work_item, attrib_name, type='string', use_batch_parent=True, top_node_path=None, spacer='', debug=debug): # This function acquires a parameter on the node for a work item, a Python override dict, or an attribute, whichever is found first. This allows parms and dicts to override attributes.
156 | try:
157 | if use_batch_parent and hasattr( work_item, 'batchParent' ) and work_item.batchParent is not None:
158 | work_item = work_item.batchParent
159 |
160 | debugLog( '...getLiveParmOrAttribValue: "{}" on node: {}'.format( attrib_name, work_item.node ), debug=debug )
161 | spacer = ' '
162 |
163 | def eval_parm_on_node(parm_name, pdg_node, work_item_id, top_node_path):
164 | debugLog('eval parm on node parm_name: {}, pdg_node: {}, work_item_id: {}, top_node_path: {}'.format( parm_name, pdg_node, work_item_id, top_node_path ) , debug=debug )
165 | context = pdg_node.context
166 | work_item = context.graph.workItemById( int(work_item_id) )
167 | result=None
168 | # result defaults to input result or none.
169 | if parm_name in pdg_node.parameterNames:
170 | # Parameters take precedence over pdg attributes because they can be unique to any pdg node, with fewer conflicts.
171 | enabled = True # If a matching parameter name is found on the top fetch node, it will be used. If a toggle exists, the value of the toggle determines whether the parm has precedence over an attribute.
172 | if parm_name+'_enabled' in pdg_node.parameterNames:
173 | with work_item.makeActive():
174 | enabled = int( pdg_node.parameter( parm_name+'_enabled' ).evaluateInt() )
175 | if enabled:
176 | if type == 'string':
177 | debugLog( 'Parm String pdg_node: {} parm_name: {} work_item: {} \nResult:'.format( pdg_node.topNode().path(), parm_name, work_item ) , debug=debug )
178 | with work_item.makeActive():
179 | result = str( pdg_node.parameter(parm_name).evaluateString() )
180 | debugLog( result , debug=debug )
181 | elif type == 'float':
182 | debugLog( 'Parm Float Result:' , debug=debug )
183 | with work_item.makeActive():
184 | result = float( pdg_node.parameter(parm_name).evaluateFloat() )
185 | debugLog( result , debug=debug )
186 | elif type == 'array':
187 | debugLog( 'Parm Array Result:' , debug=debug )
188 | with work_item.makeActive():
189 | result = str( pdg_node.parameter(parm_name).evaluateFloat() ) # NOTE: this evaluates a single float and stringifies it rather than returning an array.
190 | debugLog( result , debug=debug )
191 | else:
192 | debugLog( 'Parm Int Result:' , debug=debug )
193 | with work_item.makeActive():
194 | result = int( pdg_node.parameter(parm_name).evaluateInt() )
195 | debugLog( result , debug=debug )
196 | else:
197 | debugLog( 'Not evaluating custom parm. Disabled', debug=debug )
198 | debugLog( 'result: {}'.format(result) , debug=debug )
199 | # else:
200 | # self.warningLog( 'Warning: parm_name: {} not in pdg_node: {} parameterNames: {}'.format( parm_name, top_node_path, pdg_node.parameterNames ) )
201 | return result
202 |
203 | # Get the overrides dictionary (a python ref to upstream overrides) if no parm was defined. Those expressions will be evaluated in that location for the current work item.
204 | if top_node_path is None: # Top node path can be provided to get a value from a different location and detect the correct overrides
205 | top_node_path = work_item.node.topNode().path()
206 | attrib_names = work_item.attribNames()
207 | override_present = ( 'overrides' in attrib_names )
208 |
209 | result = None # if an override node is specified, inherit attribs from that node
210 | if ( result is None ) and override_present:
211 | override_pdg_node = None
212 |
213 | overrides_attrib = work_item.attrib('overrides')
214 | if overrides_attrib.type == pdg.attribType.PyObject and overrides_attrib.object is not None:
215 | overrides_dict = overrides_attrib.object
216 | parent_matches = { x:overrides_dict[x] for x in overrides_dict if x in top_node_path } # rebuild dict for matches only.
217 | override_pdg_node_name = None
218 | if top_node_path in overrides_dict:
219 | override_pdg_node_name = overrides_dict[top_node_path]['pdg_node_name']
220 | elif len(parent_matches) > 0: # use the first nested match if any nested matches occurred
221 | override_pdg_node_name = parent_matches[ next(iter( parent_matches )) ]['pdg_node_name']
222 | else:
223 | debugLog( 'top_node_path: {} not in overrides_dict: {}. \nparent_matches: {} If you intend to define an output path for this node you should use an output prep node above this node'.format( top_node_path, overrides_dict, parent_matches ) , debug=debug )
224 | spacer = ''
225 | # return
226 | if override_pdg_node_name is not None:
227 | override_pdg_node = work_item.node.context.graph.node(override_pdg_node_name) # Get the node by its name (really a path with underscores for the current graph)
228 | # if not override_pdg_node:
229 | # raise Exception( 'Error: couldnt get pdg node from work_item: {} name: {}'.format( work_item, override_pdg_node_name ) )
230 |
231 | if not override_pdg_node:
232 | debugLog( 'No pdg output override node for: {}'.format( work_item.name ) , debug=debug )
233 | spacer = ''
234 | # return
235 | else:
236 | result = eval_parm_on_node(attrib_name, override_pdg_node, work_item.id, top_node_path)
237 |
238 | if ( result is None ): # ... otherwise acquire the value as a parameter on the top node. If it doesn't exist, or is disabled, continue with the next method.
239 | result = eval_parm_on_node(attrib_name, work_item.node, work_item.id, top_node_path)
240 |
241 | # If no other values were defined, use the attribute value as the default / fallback.
242 | if ( result is None ) and hasattr(work_item, 'data') and hasattr(work_item.data, 'allDataMap') and ( attrib_name in work_item.data.allDataMap ): # lengthy condition for h17.5 compatibility
243 | # Attributes are acquired only if there is no parameter on the pdg node by the same name.
244 | if type == 'string':
245 | result = str( work_item.attrib(attrib_name).value() )
246 | elif type == 'float':
247 | result = float( work_item.attrib(attrib_name).value() )
248 | elif type == 'array':
249 | result = work_item.attribArray(attrib_name)
250 | else:
251 | result = int( work_item.attrib(attrib_name).value() )
252 | debugLog( '...get attrib name: "{}" result: {}'.format( attrib_name, result ) , debug=debug )
253 |
254 | return result
255 |
256 | except Exception as e:
257 | import traceback
258 | warningLog( 'ERROR: During getLiveParmOrAttribValue' )
259 | traceback.print_exc()
260 | # raise e
261 |
262 |
263 | def get_version_db_hou_node_path(work_item=None, hou_node_path=None, debug=debug):
264 | if work_item is not None:
265 | hou_node_path = get_hou_node_path( work_item, debug=debug )
266 |
267 | # hou_node = hou.node( hou_node_path )
268 |
269 | # Optionally the version db can be located on another node to isolate any callbacks that we may not want to trigger. Do not remove.
270 | # version_db_hou_node_path = hou_node.parent().path() + '/versiondb_' + hou_node.name()
271 |
272 | # version_db_hou_node_path = hou_node.path()
273 |
274 | # return version_db_hou_node_path
275 |
276 | return hou_node_path
277 |
278 |
279 |
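280 | # ---------------------------------------------------------------------------
281 | # Usage sketch (illustrative only; this helper is hypothetical and not called
282 | # anywhere in the pipeline). It shows how the pieces above compose:
283 | # getLiveParmOrAttribValue() resolves a value by precedence - a parm on the
284 | # override node named in the work item's 'overrides' PyObject attribute, then
285 | # a parm on the work item's own TOP node (optionally gated by a
286 | # '<name>_enabled' toggle), then the pdg attribute of the same name.
287 | # get_hou_node_path(), get_output() and resolve_pdg_vars() then turn that
288 | # into a concrete output path with __PDG_DIR__ expanded.
289 | def _example_resolved_output_path(work_item):
290 |     hou_node_path = get_hou_node_path(work_item) # None for exempt or unsupported node types.
291 |     if hou_node_path is None:
292 |         return None
293 |     hou_node = hou.node(hou_node_path)
294 |     raw = get_output(hou_node, work_item=work_item) # unexpanded output parm value, or None.
295 |     return resolve_pdg_vars(raw, work_item=work_item) # expand __PDG_DIR__ and normalise slashes.
296 |
297 | # A single value can be read directly with the same precedence rules, e.g.
298 | # (the 'version' attribute name is an example only):
299 | # version = getLiveParmOrAttribValue(work_item, 'version', type='int')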
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Mozilla Public License Version 2.0
2 | ==================================
3 |
4 | 1. Definitions
5 | --------------
6 |
7 | 1.1. "Contributor"
8 | means each individual or legal entity that creates, contributes to
9 | the creation of, or owns Covered Software.
10 |
11 | 1.2. "Contributor Version"
12 | means the combination of the Contributions of others (if any) used
13 | by a Contributor and that particular Contributor's Contribution.
14 |
15 | 1.3. "Contribution"
16 | means Covered Software of a particular Contributor.
17 |
18 | 1.4. "Covered Software"
19 | means Source Code Form to which the initial Contributor has attached
20 | the notice in Exhibit A, the Executable Form of such Source Code
21 | Form, and Modifications of such Source Code Form, in each case
22 | including portions thereof.
23 |
24 | 1.5. "Incompatible With Secondary Licenses"
25 | means
26 |
27 | (a) that the initial Contributor has attached the notice described
28 | in Exhibit B to the Covered Software; or
29 |
30 | (b) that the Covered Software was made available under the terms of
31 | version 1.1 or earlier of the License, but not also under the
32 | terms of a Secondary License.
33 |
34 | 1.6. "Executable Form"
35 | means any form of the work other than Source Code Form.
36 |
37 | 1.7. "Larger Work"
38 | means a work that combines Covered Software with other material, in
39 | a separate file or files, that is not Covered Software.
40 |
41 | 1.8. "License"
42 | means this document.
43 |
44 | 1.9. "Licensable"
45 | means having the right to grant, to the maximum extent possible,
46 | whether at the time of the initial grant or subsequently, any and
47 | all of the rights conveyed by this License.
48 |
49 | 1.10. "Modifications"
50 | means any of the following:
51 |
52 | (a) any file in Source Code Form that results from an addition to,
53 | deletion from, or modification of the contents of Covered
54 | Software; or
55 |
56 | (b) any new file in Source Code Form that contains any Covered
57 | Software.
58 |
59 | 1.11. "Patent Claims" of a Contributor
60 | means any patent claim(s), including without limitation, method,
61 | process, and apparatus claims, in any patent Licensable by such
62 | Contributor that would be infringed, but for the grant of the
63 | License, by the making, using, selling, offering for sale, having
64 | made, import, or transfer of either its Contributions or its
65 | Contributor Version.
66 |
67 | 1.12. "Secondary License"
68 | means either the GNU General Public License, Version 2.0, the GNU
69 | Lesser General Public License, Version 2.1, the GNU Affero General
70 | Public License, Version 3.0, or any later versions of those
71 | licenses.
72 |
73 | 1.13. "Source Code Form"
74 | means the form of the work preferred for making modifications.
75 |
76 | 1.14. "You" (or "Your")
77 | means an individual or a legal entity exercising rights under this
78 | License. For legal entities, "You" includes any entity that
79 | controls, is controlled by, or is under common control with You. For
80 | purposes of this definition, "control" means (a) the power, direct
81 | or indirect, to cause the direction or management of such entity,
82 | whether by contract or otherwise, or (b) ownership of more than
83 | fifty percent (50%) of the outstanding shares or beneficial
84 | ownership of such entity.
85 |
86 | 2. License Grants and Conditions
87 | --------------------------------
88 |
89 | 2.1. Grants
90 |
91 | Each Contributor hereby grants You a world-wide, royalty-free,
92 | non-exclusive license:
93 |
94 | (a) under intellectual property rights (other than patent or trademark)
95 | Licensable by such Contributor to use, reproduce, make available,
96 | modify, display, perform, distribute, and otherwise exploit its
97 | Contributions, either on an unmodified basis, with Modifications, or
98 | as part of a Larger Work; and
99 |
100 | (b) under Patent Claims of such Contributor to make, use, sell, offer
101 | for sale, have made, import, and otherwise transfer either its
102 | Contributions or its Contributor Version.
103 |
104 | 2.2. Effective Date
105 |
106 | The licenses granted in Section 2.1 with respect to any Contribution
107 | become effective for each Contribution on the date the Contributor first
108 | distributes such Contribution.
109 |
110 | 2.3. Limitations on Grant Scope
111 |
112 | The licenses granted in this Section 2 are the only rights granted under
113 | this License. No additional rights or licenses will be implied from the
114 | distribution or licensing of Covered Software under this License.
115 | Notwithstanding Section 2.1(b) above, no patent license is granted by a
116 | Contributor:
117 |
118 | (a) for any code that a Contributor has removed from Covered Software;
119 | or
120 |
121 | (b) for infringements caused by: (i) Your and any other third party's
122 | modifications of Covered Software, or (ii) the combination of its
123 | Contributions with other software (except as part of its Contributor
124 | Version); or
125 |
126 | (c) under Patent Claims infringed by Covered Software in the absence of
127 | its Contributions.
128 |
129 | This License does not grant any rights in the trademarks, service marks,
130 | or logos of any Contributor (except as may be necessary to comply with
131 | the notice requirements in Section 3.4).
132 |
133 | 2.4. Subsequent Licenses
134 |
135 | No Contributor makes additional grants as a result of Your choice to
136 | distribute the Covered Software under a subsequent version of this
137 | License (see Section 10.2) or under the terms of a Secondary License (if
138 | permitted under the terms of Section 3.3).
139 |
140 | 2.5. Representation
141 |
142 | Each Contributor represents that the Contributor believes its
143 | Contributions are its original creation(s) or it has sufficient rights
144 | to grant the rights to its Contributions conveyed by this License.
145 |
146 | 2.6. Fair Use
147 |
148 | This License is not intended to limit any rights You have under
149 | applicable copyright doctrines of fair use, fair dealing, or other
150 | equivalents.
151 |
152 | 2.7. Conditions
153 |
154 | Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
155 | in Section 2.1.
156 |
157 | 3. Responsibilities
158 | -------------------
159 |
160 | 3.1. Distribution of Source Form
161 |
162 | All distribution of Covered Software in Source Code Form, including any
163 | Modifications that You create or to which You contribute, must be under
164 | the terms of this License. You must inform recipients that the Source
165 | Code Form of the Covered Software is governed by the terms of this
166 | License, and how they can obtain a copy of this License. You may not
167 | attempt to alter or restrict the recipients' rights in the Source Code
168 | Form.
169 |
170 | 3.2. Distribution of Executable Form
171 |
172 | If You distribute Covered Software in Executable Form then:
173 |
174 | (a) such Covered Software must also be made available in Source Code
175 | Form, as described in Section 3.1, and You must inform recipients of
176 | the Executable Form how they can obtain a copy of such Source Code
177 | Form by reasonable means in a timely manner, at a charge no more
178 | than the cost of distribution to the recipient; and
179 |
180 | (b) You may distribute such Executable Form under the terms of this
181 | License, or sublicense it under different terms, provided that the
182 | license for the Executable Form does not attempt to limit or alter
183 | the recipients' rights in the Source Code Form under this License.
184 |
185 | 3.3. Distribution of a Larger Work
186 |
187 | You may create and distribute a Larger Work under terms of Your choice,
188 | provided that You also comply with the requirements of this License for
189 | the Covered Software. If the Larger Work is a combination of Covered
190 | Software with a work governed by one or more Secondary Licenses, and the
191 | Covered Software is not Incompatible With Secondary Licenses, this
192 | License permits You to additionally distribute such Covered Software
193 | under the terms of such Secondary License(s), so that the recipient of
194 | the Larger Work may, at their option, further distribute the Covered
195 | Software under the terms of either this License or such Secondary
196 | License(s).
197 |
198 | 3.4. Notices
199 |
200 | You may not remove or alter the substance of any license notices
201 | (including copyright notices, patent notices, disclaimers of warranty,
202 | or limitations of liability) contained within the Source Code Form of
203 | the Covered Software, except that You may alter any license notices to
204 | the extent required to remedy known factual inaccuracies.
205 |
206 | 3.5. Application of Additional Terms
207 |
208 | You may choose to offer, and to charge a fee for, warranty, support,
209 | indemnity or liability obligations to one or more recipients of Covered
210 | Software. However, You may do so only on Your own behalf, and not on
211 | behalf of any Contributor. You must make it absolutely clear that any
212 | such warranty, support, indemnity, or liability obligation is offered by
213 | You alone, and You hereby agree to indemnify every Contributor for any
214 | liability incurred by such Contributor as a result of warranty, support,
215 | indemnity or liability terms You offer. You may include additional
216 | disclaimers of warranty and limitations of liability specific to any
217 | jurisdiction.
218 |
219 | 4. Inability to Comply Due to Statute or Regulation
220 | ---------------------------------------------------
221 |
222 | If it is impossible for You to comply with any of the terms of this
223 | License with respect to some or all of the Covered Software due to
224 | statute, judicial order, or regulation then You must: (a) comply with
225 | the terms of this License to the maximum extent possible; and (b)
226 | describe the limitations and the code they affect. Such description must
227 | be placed in a text file included with all distributions of the Covered
228 | Software under this License. Except to the extent prohibited by statute
229 | or regulation, such description must be sufficiently detailed for a
230 | recipient of ordinary skill to be able to understand it.
231 |
232 | 5. Termination
233 | --------------
234 |
235 | 5.1. The rights granted under this License will terminate automatically
236 | if You fail to comply with any of its terms. However, if You become
237 | compliant, then the rights granted under this License from a particular
238 | Contributor are reinstated (a) provisionally, unless and until such
239 | Contributor explicitly and finally terminates Your grants, and (b) on an
240 | ongoing basis, if such Contributor fails to notify You of the
241 | non-compliance by some reasonable means prior to 60 days after You have
242 | come back into compliance. Moreover, Your grants from a particular
243 | Contributor are reinstated on an ongoing basis if such Contributor
244 | notifies You of the non-compliance by some reasonable means, this is the
245 | first time You have received notice of non-compliance with this License
246 | from such Contributor, and You become compliant prior to 30 days after
247 | Your receipt of the notice.
248 |
249 | 5.2. If You initiate litigation against any entity by asserting a patent
250 | infringement claim (excluding declaratory judgment actions,
251 | counter-claims, and cross-claims) alleging that a Contributor Version
252 | directly or indirectly infringes any patent, then the rights granted to
253 | You by any and all Contributors for the Covered Software under Section
254 | 2.1 of this License shall terminate.
255 |
256 | 5.3. In the event of termination under Sections 5.1 or 5.2 above, all
257 | end user license agreements (excluding distributors and resellers) which
258 | have been validly granted by You or Your distributors under this License
259 | prior to termination shall survive termination.
260 |
261 | ************************************************************************
262 | * *
263 | * 6. Disclaimer of Warranty *
264 | * ------------------------- *
265 | * *
266 | * Covered Software is provided under this License on an "as is" *
267 | * basis, without warranty of any kind, either expressed, implied, or *
268 | * statutory, including, without limitation, warranties that the *
269 | * Covered Software is free of defects, merchantable, fit for a *
270 | * particular purpose or non-infringing. The entire risk as to the *
271 | * quality and performance of the Covered Software is with You. *
272 | * Should any Covered Software prove defective in any respect, You *
273 | * (not any Contributor) assume the cost of any necessary servicing, *
274 | * repair, or correction. This disclaimer of warranty constitutes an *
275 | * essential part of this License. No use of any Covered Software is *
276 | * authorized under this License except under this disclaimer. *
277 | * *
278 | ************************************************************************
279 |
280 | ************************************************************************
281 | * *
282 | * 7. Limitation of Liability *
283 | * -------------------------- *
284 | * *
285 | * Under no circumstances and under no legal theory, whether tort *
286 | * (including negligence), contract, or otherwise, shall any *
287 | * Contributor, or anyone who distributes Covered Software as *
288 | * permitted above, be liable to You for any direct, indirect, *
289 | * special, incidental, or consequential damages of any character *
290 | * including, without limitation, damages for lost profits, loss of *
291 | * goodwill, work stoppage, computer failure or malfunction, or any *
292 | * and all other commercial damages or losses, even if such party *
293 | * shall have been informed of the possibility of such damages. This *
294 | * limitation of liability shall not apply to liability for death or *
295 | * personal injury resulting from such party's negligence to the *
296 | * extent applicable law prohibits such limitation. Some *
297 | * jurisdictions do not allow the exclusion or limitation of *
298 | * incidental or consequential damages, so this exclusion and *
299 | * limitation may not apply to You. *
300 | * *
301 | ************************************************************************
302 |
303 | 8. Litigation
304 | -------------
305 |
306 | Any litigation relating to this License may be brought only in the
307 | courts of a jurisdiction where the defendant maintains its principal
308 | place of business and such litigation shall be governed by laws of that
309 | jurisdiction, without reference to its conflict-of-law provisions.
310 | Nothing in this Section shall prevent a party's ability to bring
311 | cross-claims or counter-claims.
312 |
313 | 9. Miscellaneous
314 | ----------------
315 |
316 | This License represents the complete agreement concerning the subject
317 | matter hereof. If any provision of this License is held to be
318 | unenforceable, such provision shall be reformed only to the extent
319 | necessary to make it enforceable. Any law or regulation which provides
320 | that the language of a contract shall be construed against the drafter
321 | shall not be used to construe this License against a Contributor.
322 |
323 | 10. Versions of the License
324 | ---------------------------
325 |
326 | 10.1. New Versions
327 |
328 | Mozilla Foundation is the license steward. Except as provided in Section
329 | 10.3, no one other than the license steward has the right to modify or
330 | publish new versions of this License. Each version will be given a
331 | distinguishing version number.
332 |
333 | 10.2. Effect of New Versions
334 |
335 | You may distribute the Covered Software under the terms of the version
336 | of the License under which You originally received the Covered Software,
337 | or under the terms of any subsequent version published by the license
338 | steward.
339 |
340 | 10.3. Modified Versions
341 |
342 | If you create software not governed by this License, and you want to
343 | create a new license for such software, you may create and use a
344 | modified version of this License if you rename the license and remove
345 | any references to the name of the license steward (except to note that
346 | such modified license differs from this License).
347 |
348 | 10.4. Distributing Source Code Form that is Incompatible With Secondary
349 | Licenses
350 |
351 | If You choose to distribute Source Code Form that is Incompatible With
352 | Secondary Licenses under the terms of this version of the License, the
353 | notice described in Exhibit B of this License must be attached.
354 |
355 | Exhibit A - Source Code Form License Notice
356 | -------------------------------------------
357 |
358 | This Source Code Form is subject to the terms of the Mozilla Public
359 | License, v. 2.0. If a copy of the MPL was not distributed with this
360 | file, You can obtain one at http://mozilla.org/MPL/2.0/.
361 |
362 | If it is not possible or desirable to put the notice in a particular
363 | file, then You may include the notice in a location (such as a LICENSE
364 | file in a relevant directory) where a recipient would be likely to look
365 | for such a notice.
366 |
367 | You may add additional accurate notices of copyright ownership.
368 |
369 | Exhibit B - "Incompatible With Secondary Licenses" Notice
370 | ---------------------------------------------------------
371 |
372 | This Source Code Form is "Incompatible With Secondary Licenses", as
373 | defined by the Mozilla Public License, v. 2.0.
374 |
--------------------------------------------------------------------------------